Posted to commits@airavata.apache.org by ch...@apache.org on 2015/06/24 16:15:02 UTC

[1/5] airavata git commit: fixing compilation issues

Repository: airavata
Updated Branches:
  refs/heads/master a2dce4f8c -> df3fbe6aa


http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
index 665934e..9532c53 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
@@ -1,562 +1,562 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.util;
-
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.impl.HPCRemoteCluster;
-import org.apache.airavata.registry.cpi.AppCatalog;
-import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.credential.store.credential.impl.ssh.SSHCredential;
-import org.apache.airavata.gfac.core.GFacConstants;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.RequestData;
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsi.ssh.impl.GSISSHAbstractCluster;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.gfac.ssh.context.SSHAuthWrapper;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.security.TokenizedSSHAuthInfo;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
-import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.appcatalog.computeresource.*;
-import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
-import org.apache.airavata.model.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.*;
-
-public class GFACSSHUtils {
-    private final static Logger logger = LoggerFactory.getLogger(GFACSSHUtils.class);
-
-    public static Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();
-
-    public static final String PBS_JOB_MANAGER = "pbs";
-    public static final String SLURM_JOB_MANAGER = "slurm";
-    public static final String SUN_GRID_ENGINE_JOB_MANAGER = "UGE";
-    public static final String LSF_JOB_MANAGER = "LSF";
-
-    public static int maxClusterCount = 5;
-
-    /**
-     * This method adds computing-resource-specific authentication; if it is a third-party machine, use the other addSecurityContext
-     * @param jobExecutionContext
-     * @throws GFacException
-     * @throws ApplicationSettingsException
-     */
-    public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
-        JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
-        JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
-        if (preferredJobSubmissionProtocol == JobSubmissionProtocol.GLOBUS || preferredJobSubmissionProtocol == JobSubmissionProtocol.UNICORE) {
-            logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
-        } else if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH) {
-            try {
-                AppCatalog appCatalog = jobExecutionContext.getAppCatalog();
-                SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
-                SecurityProtocol securityProtocol = sshJobSubmission.getSecurityProtocol();
-                if (securityProtocol == SecurityProtocol.GSI || securityProtocol == SecurityProtocol.SSH_KEYS) {
-                    SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-                    String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
-                    RequestData requestData = new RequestData(jobExecutionContext.getGatewayID());
-                    requestData.setTokenId(credentialStoreToken);
-
-                    ServerInfo serverInfo = new ServerInfo(null, jobExecutionContext.getHostName());
-
-                    RemoteCluster pbsRemoteCluster = null;
-                    try {
-                        AuthenticationInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
-                        String installedParentPath = jobExecutionContext.getResourceJobManager().getJobManagerBinPath();
-                        if (installedParentPath == null) {
-                            installedParentPath = "/";
-                        }
-
-                        SSHCredential credentials = ((TokenizedSSHAuthInfo) tokenizedSSHAuthInfo).getCredentials(); // this call fetches the credentials and caches them in this object; the data is used below
-                        if(credentials.getPrivateKey()==null || credentials.getPublicKey()==null){
-                            // now we fall back to username/password authentication
-                            Properties configurationProperties = ServerSettings.getProperties();
-                            tokenizedSSHAuthInfo = new DefaultPasswordAuthenticationInfo(configurationProperties.getProperty(GFacConstants.SSH_PASSWORD));
-                        }
-                        // This should be the login user name from compute resource preference
-                        String loginUser = jobExecutionContext.getLoginUserName();
-                        if (loginUser == null) {
-                            loginUser = credentials.getPortalUserName();
-                        }
-                        serverInfo.setUserName(loginUser);
-                        jobExecutionContext.getExperiment().setUserName(loginUser);
-
-
-                        // inside the pbsCluster object
-
-                        String key = loginUser + jobExecutionContext.getHostName() + serverInfo.getPort();
-                        boolean recreate = false;
-                        synchronized (clusters) {
-                            if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-                                recreate = true;
-                            } else if (clusters.containsKey(key)) {
-                                int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-                                if (clusters.get(key).get(i).getSession().isConnected()) {
-                                    pbsRemoteCluster = clusters.get(key).get(i);
-                                } else {
-                                    clusters.get(key).remove(i);
-                                    recreate = true;
-                                }
-                                if (!recreate) {
-                                    try {
-                                        pbsRemoteCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try a real operation; if it works we are good, else we recreate
-                                    } catch (Exception e) {
-                                        clusters.get(key).remove(i);
-                                        logger.info("Connection found the connection map is expired, so we create from the scratch");
-                                        maxClusterCount++;
-                                        recreate = true; // we make the pbsRemoteCluster to create again if there is any exception druing connection
-                                    }
-                                }
-                                logger.info("Re-using the same connection used with the connection string:" + key);
-                            } else {
-                                recreate = true;
-                            }
-                            if (recreate) {
-                            	 JobManagerConfiguration jConfig = null;
-                                 String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
-                                 if (jobManager == null) {
-                                     logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
-                                     jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-                                 } else {
-                                     if (PBS_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-                                         jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-                                     } else if (SLURM_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-                                         jConfig = CommonUtils.getSLURMJobManager(installedParentPath);
-                                     } else if (SUN_GRID_ENGINE_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-                                         jConfig = CommonUtils.getUGEJobManager(installedParentPath);
-                                     } else if (LSF_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-                                         jConfig = CommonUtils.getLSFJobManager(installedParentPath);
-                                     }
-                                 }
-
-                                pbsRemoteCluster = new HPCRemoteCluster(serverInfo, tokenizedSSHAuthInfo,jConfig);
-                                List<RemoteCluster> pbsRemoteClusters = null;
-                                if (!(clusters.containsKey(key))) {
-                                    pbsRemoteClusters = new ArrayList<RemoteCluster>();
-                                } else {
-                                    pbsRemoteClusters = clusters.get(key);
-                                }
-                                pbsRemoteClusters.add(pbsRemoteCluster);
-                                clusters.put(key, pbsRemoteClusters);
-                            }
-                        }
-                    } catch (Exception e) {
-                        throw new GFacException("Error occurred...", e);
-                    }
-                    sshSecurityContext.setRemoteCluster(pbsRemoteCluster);
-                    jobExecutionContext.addSecurityContext(jobExecutionContext.getHostName(), sshSecurityContext);
-                }
-            } catch (AppCatalogException e) {
-                throw new GFacException("Error while getting SSH Submission object from app catalog", e);
-            }
-        }
-    }
-
-    /**
-     * This method can be used to add third party resource security contexts
-     * @param jobExecutionContext
-     * @param sshAuth
-     * @throws GFacException
-     * @throws ApplicationSettingsException
-     */
-    public static void addSecurityContext(JobExecutionContext jobExecutionContext,SSHAuthWrapper sshAuth) throws GFacException, ApplicationSettingsException {
-        try {
-            if (sshAuth == null) {
-                throw new GFacException("Error adding security context: sshAuthWrapper is null");
-            }
-            SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-            AppCatalog appCatalog = jobExecutionContext.getAppCatalog();
-            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
-            SSHJobSubmission sshJobSubmission = null;
-			try {
-				sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
-			} catch (Exception e1) {
-				 logger.error("Not able to get SSHJobSubmission from registry");
-			}
-
-            RemoteCluster pbsRemoteCluster = null;
-            String key=sshAuth.getKey();
-            boolean recreate = false;
-            synchronized (clusters) {
-                if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-                    recreate = true;
-                } else if (clusters.containsKey(key)) {
-                    int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-                    if (clusters.get(key).get(i).getSession().isConnected()) {
-                        pbsRemoteCluster = clusters.get(key).get(i);
-                    } else {
-                        clusters.get(key).remove(i);
-                        recreate = true;
-                    }
-                    if (!recreate) {
-                        try {
-                            pbsRemoteCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try a real operation; if it works we are good, else we recreate
-                        } catch (Exception e) {
-                            clusters.get(key).remove(i);
-                            logger.info("Connection found the connection map is expired, so we create from the scratch");
-                            maxClusterCount++;
-                            recreate = true; // we make the pbsRemoteCluster to create again if there is any exception druing connection
-                        }
-                    }
-                    logger.info("Re-using the same connection used with the connection string:" + key);
-                } else {
-                    recreate = true;
-                }
-                if (recreate) {
-               	 JobManagerConfiguration jConfig = null;
-               	 String installedParentPath = null;
-               	 if(jobExecutionContext.getResourceJobManager()!= null){
-               		installedParentPath = jobExecutionContext.getResourceJobManager().getJobManagerBinPath();
-               	 }
-                 if (installedParentPath == null) {
-                     installedParentPath = "/";
-                 }
-					if (sshJobSubmission != null) {
-						String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
-						if (jobManager == null) {
-							logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
-							jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-						} else {
-							if (PBS_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-								jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-							} else if (SLURM_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-								jConfig = CommonUtils.getSLURMJobManager(installedParentPath);
-							} else if (SUN_GRID_ENGINE_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-								jConfig = CommonUtils.getUGEJobManager(installedParentPath);
-							} else if (LSF_JOB_MANAGER.equals(jobManager)) {
-								jConfig = CommonUtils.getLSFJobManager(installedParentPath);
-							}
-						}
-					}
-                    pbsRemoteCluster = new HPCRemoteCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),jConfig);
-                    key = sshAuth.getKey();
-                    List<RemoteCluster> pbsRemoteClusters = null;
-                    if (!(clusters.containsKey(key))) {
-                        pbsRemoteClusters = new ArrayList<RemoteCluster>();
-                    } else {
-                        pbsRemoteClusters = clusters.get(key);
-                    }
-                    pbsRemoteClusters.add(pbsRemoteCluster);
-                    clusters.put(key, pbsRemoteClusters);
-                }
-            }
-            sshSecurityContext.setRemoteCluster(pbsRemoteCluster);
-            jobExecutionContext.addSecurityContext(key, sshSecurityContext);
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            throw new GFacException("Error adding security Context", e);
-        }
-    }
-
-
-    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) throws AppCatalogException, ApplicationSettingsException {
-        JobDescriptor jobDescriptor = new JobDescriptor();
-        TaskDetails taskData = jobExecutionContext.getTaskData();
-
-
-        // set the email-based job monitoring address if the monitor mode is JOB_EMAIL_NOTIFICATION_MONITOR
-        boolean addJobNotifMail = isEmailBasedJobMonitor(jobExecutionContext);
-        String emailIds = null;
-        if (addJobNotifMail) {
-            emailIds = ServerSettings.getEmailBasedMonitorAddress();
-        }
-        // add all configured job notification email addresses.
-        if (ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_ENABLE).equalsIgnoreCase("true")) {
-            String flags = ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_FLAGS);
-            if (flags != null && jobExecutionContext.getApplicationContext().getComputeResourceDescription().getHostName().equals("stampede.tacc.xsede.org")) {
-                flags = "ALL";
-            }
-            jobDescriptor.setMailOptions(flags);
-
-            String userJobNotifEmailIds = ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_EMAILIDS);
-            if (userJobNotifEmailIds != null && !userJobNotifEmailIds.isEmpty()) {
-                if (emailIds != null && !emailIds.isEmpty()) {
-                    emailIds += ("," + userJobNotifEmailIds);
-                } else {
-                    emailIds = userJobNotifEmailIds;
-                }
-            }
-
-            if (taskData.isEnableEmailNotification()) {
-                List<String> emailList = jobExecutionContext.getTaskData().getEmailAddresses();
-                String elist = GFacUtils.listToCsv(emailList, ',');
-                if (elist != null && !elist.isEmpty()) {
-                    if (emailIds != null && !emailIds.isEmpty()) {
-                        emailIds = emailIds + "," + elist;
-                    } else {
-                        emailIds = elist;
-                    }
-                }
-            }
-        }
-        if (emailIds != null && !emailIds.isEmpty()) {
-            logger.info("Email list: " + emailIds);
-            jobDescriptor.setMailAddress(emailIds);
-        }
-        // this is common for any application descriptor
-
-        jobDescriptor.setCallBackIp(ServerSettings.getIp());
-        jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
-        jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
-        jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
-        jobDescriptor.setExecutablePath(jobExecutionContext.getApplicationContext()
-                .getApplicationDeploymentDescription().getExecutablePath());
-        jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
-        jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
-        String computationalProjectAccount = taskData.getTaskScheduling().getComputationalProjectAccount();
-        if (computationalProjectAccount == null){
-            ComputeResourcePreference computeResourcePreference = jobExecutionContext.getApplicationContext().getComputeResourcePreference();
-            if (computeResourcePreference != null) {
-                computationalProjectAccount = computeResourcePreference.getAllocationProjectNumber();
-            }
-        }
-        if (computationalProjectAccount != null) {
-            jobDescriptor.setAcountString(computationalProjectAccount);
-        }
-        // To make the job name alphanumeric
-        jobDescriptor.setJobName("A" + String.valueOf(generateJobName()));
-        jobDescriptor.setWorkingDirectory(jobExecutionContext.getWorkingDir());
-
-        List<String> inputValues = new ArrayList<String>();
-        MessageContext input = jobExecutionContext.getInMessageContext();
-
-        // sort the inputs first and then build the command list
-        Comparator<InputDataObjectType> inputOrderComparator = new Comparator<InputDataObjectType>() {
-            @Override
-            public int compare(InputDataObjectType inputDataObjectType, InputDataObjectType t1) {
-                return inputDataObjectType.getInputOrder() - t1.getInputOrder();
-            }
-        };
-        Set<InputDataObjectType> sortedInputSet = new TreeSet<InputDataObjectType>(inputOrderComparator);
-        for (Object object : input.getParameters().values()) {
-            if (object instanceof InputDataObjectType) {
-                InputDataObjectType inputDOT = (InputDataObjectType) object;
-                sortedInputSet.add(inputDOT);
-            }
-        }
-        for (InputDataObjectType inputDataObjectType : sortedInputSet) {
-            if (!inputDataObjectType.isRequiredToAddedToCommandLine()) {
-                continue;
-            }
-            if (inputDataObjectType.getApplicationArgument() != null
-                    && !inputDataObjectType.getApplicationArgument().equals("")) {
-                inputValues.add(inputDataObjectType.getApplicationArgument());
-            }
-
-            if (inputDataObjectType.getValue() != null
-                    && !inputDataObjectType.getValue().equals("")) {
-                if (inputDataObjectType.getType() == DataType.URI) {
-                    // set only the relative path
-                    String filePath = inputDataObjectType.getValue();
-                    filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
-                    inputValues.add(filePath);
-                }else {
-                    inputValues.add(inputDataObjectType.getValue());
-                }
-
-            }
-        }
-        Map<String, Object> outputParams = jobExecutionContext.getOutMessageContext().getParameters();
-        for (Object outputParam : outputParams.values()) {
-            if (outputParam instanceof OutputDataObjectType) {
-                OutputDataObjectType output = (OutputDataObjectType) outputParam;
-                if (output.getApplicationArgument() != null
-                        && !output.getApplicationArgument().equals("")) {
-                    inputValues.add(output.getApplicationArgument());
-                }
-                if (output.getValue() != null && !output.getValue().equals("") && output.isRequiredToAddedToCommandLine()) {
-                    if (output.getType() == DataType.URI){
-                        String filePath = output.getValue();
-                        filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
-                        inputValues.add(filePath);
-                    }
-                }
-            }
-        }
-
-        jobDescriptor.setInputValues(inputValues);
-        jobDescriptor.setUserName(((GSISSHAbstractCluster) remoteCluster).getServerInfo().getUserName());
-        jobDescriptor.setShellName("/bin/bash");
-        jobDescriptor.setAllEnvExport(true);
-        jobDescriptor.setOwner(((HPCRemoteCluster) remoteCluster).getServerInfo().getUserName());
-
-        ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
-
-
-        ComputationalResourceScheduling taskScheduling = taskData.getTaskScheduling();
-        if (taskScheduling != null) {
-            int totalNodeCount = taskScheduling.getNodeCount();
-            int totalCPUCount = taskScheduling.getTotalCPUCount();
-
-
-            if (taskScheduling.getComputationalProjectAccount() != null) {
-                jobDescriptor.setAcountString(taskScheduling.getComputationalProjectAccount());
-            }
-            if (taskScheduling.getQueueName() != null) {
-                jobDescriptor.setQueueName(taskScheduling.getQueueName());
-            }
-
-            if (totalNodeCount > 0) {
-                jobDescriptor.setNodes(totalNodeCount);
-            }
-            if (taskScheduling.getComputationalProjectAccount() != null) {
-                jobDescriptor.setAcountString(taskScheduling.getComputationalProjectAccount());
-            }
-            if (taskScheduling.getQueueName() != null) {
-                jobDescriptor.setQueueName(taskScheduling.getQueueName());
-            }
-            if (totalCPUCount > 0) {
-                int ppn = totalCPUCount / totalNodeCount;
-                jobDescriptor.setProcessesPerNode(ppn);
-                jobDescriptor.setCPUCount(totalCPUCount);
-            }
-            if (taskScheduling.getWallTimeLimit() > 0) {
-                jobDescriptor.setMaxWallTime(String.valueOf(taskScheduling.getWallTimeLimit()));
-                if(resourceJobManager.getResourceJobManagerType().equals(ResourceJobManagerType.LSF)){
-                    jobDescriptor.setMaxWallTimeForLSF(String.valueOf(taskScheduling.getWallTimeLimit()));
-                }
-            }
-            if (taskScheduling.getTotalPhysicalMemory() > 0) {
-                jobDescriptor.setUsedMemory(taskScheduling.getTotalPhysicalMemory() + "");
-            }
-        } else {
-            logger.error("Task scheduling cannot be null at this point..");
-        }
-        ApplicationDeploymentDescription appDepDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        List<String> moduleCmds = appDepDescription.getModuleLoadCmds();
-        if (moduleCmds != null) {
-            for (String moduleCmd : moduleCmds) {
-                jobDescriptor.addModuleLoadCommands(moduleCmd);
-            }
-        }
-        List<String> preJobCommands = appDepDescription.getPreJobCommands();
-        if (preJobCommands != null) {
-            for (String preJobCommand : preJobCommands) {
-                jobDescriptor.addPreJobCommand(parseCommand(preJobCommand, jobExecutionContext));
-            }
-        }
-
-        List<String> postJobCommands = appDepDescription.getPostJobCommands();
-        if (postJobCommands != null) {
-            for (String postJobCommand : postJobCommands) {
-                jobDescriptor.addPostJobCommand(parseCommand(postJobCommand, jobExecutionContext));
-            }
-        }
-
-        ApplicationParallelismType parallelism = appDepDescription.getParallelism();
-        if (parallelism != null){
-            if (parallelism == ApplicationParallelismType.MPI || parallelism == ApplicationParallelismType.OPENMP || parallelism == ApplicationParallelismType.OPENMP_MPI){
-                Map<JobManagerCommand, String> jobManagerCommands = resourceJobManager.getJobManagerCommands();
-                if (jobManagerCommands != null && !jobManagerCommands.isEmpty()) {
-                    for (JobManagerCommand command : jobManagerCommands.keySet()) {
-                        if (command == JobManagerCommand.SUBMISSION) {
-                            String commandVal = jobManagerCommands.get(command);
-                            jobDescriptor.setJobSubmitter(commandVal);
-                        }
-                    }
-                }
-            }
-        }
-        return jobDescriptor;
-    }
-
-    public static boolean isEmailBasedJobMonitor(JobExecutionContext jobExecutionContext) throws AppCatalogException {
-        if (jobExecutionContext.getPreferredJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
-            String jobSubmissionInterfaceId = jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId();
-            SSHJobSubmission sshJobSubmission = jobExecutionContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
-            MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
-            return monitorMode != null && monitorMode == MonitorMode.JOB_EMAIL_NOTIFICATION_MONITOR;
-        } else {
-            return false;
-        }
-    }
-
-    private static int generateJobName() {
-        Random random = new Random();
-        int i = random.nextInt(Integer.MAX_VALUE);
-        i = i + 99999999;
-        if(i<0) {
-            i = i * (-1);
-        }
-        return i;
-    }
-
-    private static String parseCommand(String value, JobExecutionContext jobExecutionContext) {
-        String parsedValue = value.replaceAll("\\$workingDir", jobExecutionContext.getWorkingDir());
-        parsedValue = parsedValue.replaceAll("\\$inputDir", jobExecutionContext.getInputDir());
-        parsedValue = parsedValue.replaceAll("\\$outputDir", jobExecutionContext.getOutputDir());
-        return parsedValue;
-    }
-    /**
-     * This method can be used to set the security context if it is not set, and then reuse it in other places
-     * @param jobExecutionContext
-     * @param authenticationInfo
-     * @param userName
-     * @param hostName
-     * @param port
-     * @return
-     * @throws GFacException
-     */
-    public static String prepareSecurityContext(JobExecutionContext jobExecutionContext, AuthenticationInfo authenticationInfo
-            , String userName, String hostName, int port) throws GFacException {
-        ServerInfo serverInfo = new ServerInfo(userName, hostName);
-        String key = userName+hostName+port;
-        SSHAuthWrapper sshAuthWrapper = new SSHAuthWrapper(serverInfo, authenticationInfo, key);
-        if (jobExecutionContext.getSecurityContext(key) == null) {
-            try {
-                GFACSSHUtils.addSecurityContext(jobExecutionContext, sshAuthWrapper);
-            } catch (ApplicationSettingsException e) {
-                logger.error(e.getMessage());
-                try {
-                    StringWriter errors = new StringWriter();
-                    e.printStackTrace(new PrintWriter(errors));
-                    GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                } catch (GFacException e1) {
-                    logger.error(e1.getLocalizedMessage());
-                }
-                throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-            }
-        }
-        return key;
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.util;
+//
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.impl.HPCRemoteCluster;
+//import org.apache.airavata.registry.cpi.AppCatalog;
+//import org.apache.airavata.registry.cpi.AppCatalogException;
+//import org.apache.airavata.common.exception.ApplicationSettingsException;
+//import org.apache.airavata.common.utils.ServerSettings;
+//import org.apache.airavata.credential.store.credential.impl.ssh.SSHCredential;
+//import org.apache.airavata.gfac.core.GFacConstants;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.RequestData;
+//import org.apache.airavata.gfac.core.JobDescriptor;
+//import org.apache.airavata.gfac.core.JobManagerConfiguration;
+//import org.apache.airavata.gfac.core.cluster.ServerInfo;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.gsi.ssh.impl.GSISSHAbstractCluster;
+//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
+//import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;
+//import org.apache.airavata.gfac.ssh.context.SSHAuthWrapper;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.security.TokenizedSSHAuthInfo;
+//import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
+//import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+//import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+//import org.apache.airavata.model.appcatalog.computeresource.*;
+//import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
+//import org.apache.airavata.model.experiment.ComputationalResourceScheduling;
+//import org.apache.airavata.model.experiment.CorrectiveAction;
+//import org.apache.airavata.model.experiment.ErrorCategory;
+//import org.apache.airavata.model.experiment.TaskDetails;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.util.*;
+//
+//public class GFACSSHUtils {
+//    private final static Logger logger = LoggerFactory.getLogger(GFACSSHUtils.class);
+//
+//    public static Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();
+//
+//    public static final String PBS_JOB_MANAGER = "pbs";
+//    public static final String SLURM_JOB_MANAGER = "slurm";
+//    public static final String SUN_GRID_ENGINE_JOB_MANAGER = "UGE";
+//    public static final String LSF_JOB_MANAGER = "LSF";
+//
+//    public static int maxClusterCount = 5;
+//
+//    /**
+//     * This method adds computing-resource-specific authentication; if it is a third-party machine, use the other addSecurityContext
+//     * @param jobExecutionContext
+//     * @throws GFacException
+//     * @throws ApplicationSettingsException
+//     */
+//    public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
+//        JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+//        JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+//        if (preferredJobSubmissionProtocol == JobSubmissionProtocol.GLOBUS || preferredJobSubmissionProtocol == JobSubmissionProtocol.UNICORE) {
+//            logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
+//        } else if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH) {
+//            try {
+//                AppCatalog appCatalog = jobExecutionContext.getAppCatalog();
+//                SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
+//                SecurityProtocol securityProtocol = sshJobSubmission.getSecurityProtocol();
+//                if (securityProtocol == SecurityProtocol.GSI || securityProtocol == SecurityProtocol.SSH_KEYS) {
+//                    SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
+//                    String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
+//                    RequestData requestData = new RequestData(jobExecutionContext.getGatewayID());
+//                    requestData.setTokenId(credentialStoreToken);
+//
+//                    ServerInfo serverInfo = new ServerInfo(null, jobExecutionContext.getHostName());
+//
+//                    RemoteCluster pbsRemoteCluster = null;
+//                    try {
+//                        AuthenticationInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
+//                        String installedParentPath = jobExecutionContext.getResourceJobManager().getJobManagerBinPath();
+//                        if (installedParentPath == null) {
+//                            installedParentPath = "/";
+//                        }
+//
+//                        SSHCredential credentials = ((TokenizedSSHAuthInfo) tokenizedSSHAuthInfo).getCredentials(); // this call fetches the credentials and caches them in this object; the data is used below
+//                        if(credentials.getPrivateKey()==null || credentials.getPublicKey()==null){
+//                            // now we fall back to username/password authentication
+//                            Properties configurationProperties = ServerSettings.getProperties();
+//                            tokenizedSSHAuthInfo = new DefaultPasswordAuthenticationInfo(configurationProperties.getProperty(GFacConstants.SSH_PASSWORD));
+//                        }
+//                        // This should be the login user name from compute resource preference
+//                        String loginUser = jobExecutionContext.getLoginUserName();
+//                        if (loginUser == null) {
+//                            loginUser = credentials.getPortalUserName();
+//                        }
+//                        serverInfo.setUserName(loginUser);
+//                        jobExecutionContext.getExperiment().setUserName(loginUser);
+//
+//
+//                        // inside the pbsCluster object
+//
+//                        String key = loginUser + jobExecutionContext.getHostName() + serverInfo.getPort();
+//                        boolean recreate = false;
+//                        synchronized (clusters) {
+//                            if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
+//                                recreate = true;
+//                            } else if (clusters.containsKey(key)) {
+//                                int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
+//                                if (clusters.get(key).get(i).getSession().isConnected()) {
+//                                    pbsRemoteCluster = clusters.get(key).get(i);
+//                                } else {
+//                                    clusters.get(key).remove(i);
+//                                    recreate = true;
+//                                }
+//                                if (!recreate) {
+//                                    try {
+//                                        pbsRemoteCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try a real operation; if it works we are good, else we recreate
+//                                    } catch (Exception e) {
+//                                        clusters.get(key).remove(i);
+//                                        logger.info("Connection found the connection map is expired, so we create from the scratch");
+//                                        maxClusterCount++;
+//                                        recreate = true; // we make the pbsRemoteCluster to create again if there is any exception druing connection
+//                                    }
+//                                }
+//                                logger.info("Re-using the same connection used with the connection string:" + key);
+//                            } else {
+//                                recreate = true;
+//                            }
+//                            if (recreate) {
+//                            	 JobManagerConfiguration jConfig = null;
+//                                 String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
+//                                 if (jobManager == null) {
+//                                     logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
+//                                     jConfig = CommonUtils.getPBSJobManager(installedParentPath);
+//                                 } else {
+//                                     if (PBS_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
+//                                         jConfig = CommonUtils.getPBSJobManager(installedParentPath);
+//                                     } else if (SLURM_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
+//                                         jConfig = CommonUtils.getSLURMJobManager(installedParentPath);
+//                                     } else if (SUN_GRID_ENGINE_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
+//                                         jConfig = CommonUtils.getUGEJobManager(installedParentPath);
+//                                     } else if (LSF_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
+//                                         jConfig = CommonUtils.getLSFJobManager(installedParentPath);
+//                                     }
+//                                 }
+//
+//                                pbsRemoteCluster = new HPCRemoteCluster(serverInfo, tokenizedSSHAuthInfo,jConfig);
+//                                List<RemoteCluster> pbsRemoteClusters = null;
+//                                if (!(clusters.containsKey(key))) {
+//                                    pbsRemoteClusters = new ArrayList<RemoteCluster>();
+//                                } else {
+//                                    pbsRemoteClusters = clusters.get(key);
+//                                }
+//                                pbsRemoteClusters.add(pbsRemoteCluster);
+//                                clusters.put(key, pbsRemoteClusters);
+//                            }
+//                        }
+//                    } catch (Exception e) {
+//                        throw new GFacException("Error occurred...", e);
+//                    }
+//                    sshSecurityContext.setRemoteCluster(pbsRemoteCluster);
+//                    jobExecutionContext.addSecurityContext(jobExecutionContext.getHostName(), sshSecurityContext);
+//                }
+//            } catch (AppCatalogException e) {
+//                throw new GFacException("Error while getting SSH Submission object from app catalog", e);
+//            }
+//        }
+//    }
+//
+//    /**
+//     * This method can be used to add third party resource security contexts
+//     * @param jobExecutionContext
+//     * @param sshAuth
+//     * @throws GFacException
+//     * @throws ApplicationSettingsException
+//     */
+//    public static void addSecurityContext(JobExecutionContext jobExecutionContext,SSHAuthWrapper sshAuth) throws GFacException, ApplicationSettingsException {
+//        try {
+//            if (sshAuth == null) {
+//                throw new GFacException("Error adding security context: sshAuthWrapper is null");
+//            }
+//            SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
+//            AppCatalog appCatalog = jobExecutionContext.getAppCatalog();
+//            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+//            SSHJobSubmission sshJobSubmission = null;
+//			try {
+//				sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
+//			} catch (Exception e1) {
+//				 logger.error("Not able to get SSHJobSubmission from registry");
+//			}
+//
+//            RemoteCluster pbsRemoteCluster = null;
+//            String key=sshAuth.getKey();
+//            boolean recreate = false;
+//            synchronized (clusters) {
+//                if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
+//                    recreate = true;
+//                } else if (clusters.containsKey(key)) {
+//                    int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
+//                    if (clusters.get(key).get(i).getSession().isConnected()) {
+//                        pbsRemoteCluster = clusters.get(key).get(i);
+//                    } else {
+//                        clusters.get(key).remove(i);
+//                        recreate = true;
+//                    }
+//                    if (!recreate) {
+//                        try {
+//                            pbsRemoteCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try a real operation; if it works we are good, else we recreate
+//                        } catch (Exception e) {
+//                            clusters.get(key).remove(i);
+//                            logger.info("Connection found the connection map is expired, so we create from the scratch");
+//                            maxClusterCount++;
+//                            recreate = true; // we make the pbsRemoteCluster to create again if there is any exception druing connection
+//                        }
+//                    }
+//                    logger.info("Re-using the same connection used with the connection string:" + key);
+//                } else {
+//                    recreate = true;
+//                }
+//                if (recreate) {
+//               	 JobManagerConfiguration jConfig = null;
+//               	 String installedParentPath = null;
+//               	 if(jobExecutionContext.getResourceJobManager()!= null){
+//               		installedParentPath = jobExecutionContext.getResourceJobManager().getJobManagerBinPath();
+//               	 }
+//                 if (installedParentPath == null) {
+//                     installedParentPath = "/";
+//                 }
+//					if (sshJobSubmission != null) {
+//						String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
+//						if (jobManager == null) {
+//							logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
+//							jConfig = CommonUtils.getPBSJobManager(installedParentPath);
+//						} else {
+//							if (PBS_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
+//								jConfig = CommonUtils.getPBSJobManager(installedParentPath);
+//							} else if (SLURM_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
+//								jConfig = CommonUtils.getSLURMJobManager(installedParentPath);
+//							} else if (SUN_GRID_ENGINE_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
+//								jConfig = CommonUtils.getUGEJobManager(installedParentPath);
+//							} else if (LSF_JOB_MANAGER.equals(jobManager)) {
+//								jConfig = CommonUtils.getLSFJobManager(installedParentPath);
+//							}
+//						}
+//					}
+//                    pbsRemoteCluster = new HPCRemoteCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),jConfig);
+//                    key = sshAuth.getKey();
+//                    List<RemoteCluster> pbsRemoteClusters = null;
+//                    if (!(clusters.containsKey(key))) {
+//                        pbsRemoteClusters = new ArrayList<RemoteCluster>();
+//                    } else {
+//                        pbsRemoteClusters = clusters.get(key);
+//                    }
+//                    pbsRemoteClusters.add(pbsRemoteCluster);
+//                    clusters.put(key, pbsRemoteClusters);
+//                }
+//            }
+//            sshSecurityContext.setRemoteCluster(pbsRemoteCluster);
+//            jobExecutionContext.addSecurityContext(key, sshSecurityContext);
+//        } catch (Exception e) {
+//            logger.error(e.getMessage(), e);
+//            throw new GFacException("Error adding security Context", e);
+//        }
+//    }
+//
+//
+//    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) throws AppCatalogException, ApplicationSettingsException {
+//        JobDescriptor jobDescriptor = new JobDescriptor();
+//        TaskDetails taskData = jobExecutionContext.getTaskData();
+//
+//
+//        // set the email-based job monitoring address if the monitor mode is JOB_EMAIL_NOTIFICATION_MONITOR
+//        boolean addJobNotifMail = isEmailBasedJobMonitor(jobExecutionContext);
+//        String emailIds = null;
+//        if (addJobNotifMail) {
+//            emailIds = ServerSettings.getEmailBasedMonitorAddress();
+//        }
+//        // add all configured job notification email addresses.
+//        if (ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_ENABLE).equalsIgnoreCase("true")) {
+//            String flags = ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_FLAGS);
+//            if (flags != null && jobExecutionContext.getApplicationContext().getComputeResourceDescription().getHostName().equals("stampede.tacc.xsede.org")) {
+//                flags = "ALL";
+//            }
+//            jobDescriptor.setMailOptions(flags);
+//
+//            String userJobNotifEmailIds = ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_EMAILIDS);
+//            if (userJobNotifEmailIds != null && !userJobNotifEmailIds.isEmpty()) {
+//                if (emailIds != null && !emailIds.isEmpty()) {
+//                    emailIds += ("," + userJobNotifEmailIds);
+//                } else {
+//                    emailIds = userJobNotifEmailIds;
+//                }
+//            }
+//
+//            if (taskData.isEnableEmailNotification()) {
+//                List<String> emailList = jobExecutionContext.getTaskData().getEmailAddresses();
+//                String elist = GFacUtils.listToCsv(emailList, ',');
+//                if (elist != null && !elist.isEmpty()) {
+//                    if (emailIds != null && !emailIds.isEmpty()) {
+//                        emailIds = emailIds + "," + elist;
+//                    } else {
+//                        emailIds = elist;
+//                    }
+//                }
+//            }
+//        }
+//        if (emailIds != null && !emailIds.isEmpty()) {
+//            logger.info("Email list: " + emailIds);
+//            jobDescriptor.setMailAddress(emailIds);
+//        }
+//        // this is common for any application descriptor
+//
+//        jobDescriptor.setCallBackIp(ServerSettings.getIp());
+//        jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
+//        jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
+//        jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
+//        jobDescriptor.setExecutablePath(jobExecutionContext.getApplicationContext()
+//                .getApplicationDeploymentDescription().getExecutablePath());
+//        jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
+//        jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
+//        String computationalProjectAccount = taskData.getTaskScheduling().getComputationalProjectAccount();
+//        if (computationalProjectAccount == null){
+//            ComputeResourcePreference computeResourcePreference = jobExecutionContext.getApplicationContext().getComputeResourcePreference();
+//            if (computeResourcePreference != null) {
+//                computationalProjectAccount = computeResourcePreference.getAllocationProjectNumber();
+//            }
+//        }
+//        if (computationalProjectAccount != null) {
+//            jobDescriptor.setAcountString(computationalProjectAccount);
+//        }
+//        // To make the job name alphanumeric
+//        jobDescriptor.setJobName("A" + String.valueOf(generateJobName()));
+//        jobDescriptor.setWorkingDirectory(jobExecutionContext.getWorkingDir());
+//
+//        List<String> inputValues = new ArrayList<String>();
+//        MessageContext input = jobExecutionContext.getInMessageContext();
+//
+//        // sort the inputs first and then build the command list
+//        Comparator<InputDataObjectType> inputOrderComparator = new Comparator<InputDataObjectType>() {
+//            @Override
+//            public int compare(InputDataObjectType inputDataObjectType, InputDataObjectType t1) {
+//                return inputDataObjectType.getInputOrder() - t1.getInputOrder();
+//            }
+//        };
+//        Set<InputDataObjectType> sortedInputSet = new TreeSet<InputDataObjectType>(inputOrderComparator);
+//        for (Object object : input.getParameters().values()) {
+//            if (object instanceof InputDataObjectType) {
+//                InputDataObjectType inputDOT = (InputDataObjectType) object;
+//                sortedInputSet.add(inputDOT);
+//            }
+//        }
+//        for (InputDataObjectType inputDataObjectType : sortedInputSet) {
+//            if (!inputDataObjectType.isRequiredToAddedToCommandLine()) {
+//                continue;
+//            }
+//            if (inputDataObjectType.getApplicationArgument() != null
+//                    && !inputDataObjectType.getApplicationArgument().equals("")) {
+//                inputValues.add(inputDataObjectType.getApplicationArgument());
+//            }
+//
+//            if (inputDataObjectType.getValue() != null
+//                    && !inputDataObjectType.getValue().equals("")) {
+//                if (inputDataObjectType.getType() == DataType.URI) {
+//                    // set only the relative path
+//                    String filePath = inputDataObjectType.getValue();
+//                    filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
+//                    inputValues.add(filePath);
+//                }else {
+//                    inputValues.add(inputDataObjectType.getValue());
+//                }
+//
+//            }
+//        }
+//        Map<String, Object> outputParams = jobExecutionContext.getOutMessageContext().getParameters();
+//        for (Object outputParam : outputParams.values()) {
+//            if (outputParam instanceof OutputDataObjectType) {
+//                OutputDataObjectType output = (OutputDataObjectType) outputParam;
+//                if (output.getApplicationArgument() != null
+//                        && !output.getApplicationArgument().equals("")) {
+//                    inputValues.add(output.getApplicationArgument());
+//                }
+//                if (output.getValue() != null && !output.getValue().equals("") && output.isRequiredToAddedToCommandLine()) {
+//                    if (output.getType() == DataType.URI){
+//                        String filePath = output.getValue();
+//                        filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
+//                        inputValues.add(filePath);
+//                    }
+//                }
+//            }
+//        }
+//
+//        jobDescriptor.setInputValues(inputValues);
+//        jobDescriptor.setUserName(((GSISSHAbstractCluster) remoteCluster).getServerInfo().getUserName());
+//        jobDescriptor.setShellName("/bin/bash");
+//        jobDescriptor.setAllEnvExport(true);
+//        jobDescriptor.setOwner(((HPCRemoteCluster) remoteCluster).getServerInfo().getUserName());
+//
+//        ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
+//
+//
+//        ComputationalResourceScheduling taskScheduling = taskData.getTaskScheduling();
+//        if (taskScheduling != null) {
+//            int totalNodeCount = taskScheduling.getNodeCount();
+//            int totalCPUCount = taskScheduling.getTotalCPUCount();
+//
+//
+//            if (taskScheduling.getComputationalProjectAccount() != null) {
+//                jobDescriptor.setAcountString(taskScheduling.getComputationalProjectAccount());
+//            }
+//            if (taskScheduling.getQueueName() != null) {
+//                jobDescriptor.setQueueName(taskScheduling.getQueueName());
+//            }
+//
+//            if (totalNodeCount > 0) {
+//                jobDescriptor.setNodes(totalNodeCount);
+//            }
+//            if (totalCPUCount > 0) {
+//                // guard against division by zero when no node count was given
+//                int ppn = totalNodeCount > 0 ? totalCPUCount / totalNodeCount : totalCPUCount;
+//                jobDescriptor.setProcessesPerNode(ppn);
+//                jobDescriptor.setCPUCount(totalCPUCount);
+//            }
+//            if (taskScheduling.getWallTimeLimit() > 0) {
+//                jobDescriptor.setMaxWallTime(String.valueOf(taskScheduling.getWallTimeLimit()));
+//                if(resourceJobManager.getResourceJobManagerType().equals(ResourceJobManagerType.LSF)){
+//                    jobDescriptor.setMaxWallTimeForLSF(String.valueOf(taskScheduling.getWallTimeLimit()));
+//                }
+//            }
+//            if (taskScheduling.getTotalPhysicalMemory() > 0) {
+//                jobDescriptor.setUsedMemory(taskScheduling.getTotalPhysicalMemory() + "");
+//            }
+//        } else {
+//            logger.error("Task scheduling cannot be null at this point..");
+//        }
+//        ApplicationDeploymentDescription appDepDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
+//        List<String> moduleCmds = appDepDescription.getModuleLoadCmds();
+//        if (moduleCmds != null) {
+//            for (String moduleCmd : moduleCmds) {
+//                jobDescriptor.addModuleLoadCommands(moduleCmd);
+//            }
+//        }
+//        List<String> preJobCommands = appDepDescription.getPreJobCommands();
+//        if (preJobCommands != null) {
+//            for (String preJobCommand : preJobCommands) {
+//                jobDescriptor.addPreJobCommand(parseCommand(preJobCommand, jobExecutionContext));
+//            }
+//        }
+//
+//        List<String> postJobCommands = appDepDescription.getPostJobCommands();
+//        if (postJobCommands != null) {
+//            for (String postJobCommand : postJobCommands) {
+//                jobDescriptor.addPostJobCommand(parseCommand(postJobCommand, jobExecutionContext));
+//            }
+//        }
+//
+//        ApplicationParallelismType parallelism = appDepDescription.getParallelism();
+//        if (parallelism != null){
+//            if (parallelism == ApplicationParallelismType.MPI || parallelism == ApplicationParallelismType.OPENMP || parallelism == ApplicationParallelismType.OPENMP_MPI){
+//                Map<JobManagerCommand, String> jobManagerCommands = resourceJobManager.getJobManagerCommands();
+//                if (jobManagerCommands != null && !jobManagerCommands.isEmpty()) {
+//                    for (JobManagerCommand command : jobManagerCommands.keySet()) {
+//                        if (command == JobManagerCommand.SUBMISSION) {
+//                            String commandVal = jobManagerCommands.get(command);
+//                            jobDescriptor.setJobSubmitter(commandVal);
+//                        }
+//                    }
+//                }
+//            }
+//        }
+//        return jobDescriptor;
+//    }
+//
+//    public static boolean isEmailBasedJobMonitor(JobExecutionContext jobExecutionContext) throws AppCatalogException {
+//        if (jobExecutionContext.getPreferredJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
+//            String jobSubmissionInterfaceId = jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId();
+//            SSHJobSubmission sshJobSubmission = jobExecutionContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
+//            MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
+//            return monitorMode != null && monitorMode == MonitorMode.JOB_EMAIL_NOTIFICATION_MONITOR;
+//        } else {
+//            return false;
+//        }
+//    }
+//
+//    private static int generateJobName() {
+//        Random random = new Random();
+//        int i = random.nextInt(Integer.MAX_VALUE);
+//        // mask the sign bit so the addition can never produce a negative value
+//        // (simple negation would leave Integer.MIN_VALUE negative)
+//        i = (i + 99999999) & Integer.MAX_VALUE;
+//        return i;
+//    }
+//
+//    private static String parseCommand(String value, JobExecutionContext jobExecutionContext) {
+//        // literal replace is used instead of replaceAll so that "$" or "\" in a
+//        // directory path cannot be misread as regex replacement syntax
+//        String parsedValue = value.replace("$workingDir", jobExecutionContext.getWorkingDir());
+//        parsedValue = parsedValue.replace("$inputDir", jobExecutionContext.getInputDir());
+//        parsedValue = parsedValue.replace("$outputDir", jobExecutionContext.getOutputDir());
+//        return parsedValue;
+//    }
+//    /**
+//     * Sets the security context for the given host if it is not already set, so that it
+//     * can be reused elsewhere later.
+//     * @param jobExecutionContext the current job execution context
+//     * @param authenticationInfo the SSH authentication information to wrap
+//     * @param userName the remote user name
+//     * @param hostName the remote host name
+//     * @param port the SSH port
+//     * @return the key (userName + hostName + port) under which the context is registered
+//     * @throws GFacException if the security context cannot be created
+//     */
+//    public static String prepareSecurityContext(JobExecutionContext jobExecutionContext, AuthenticationInfo authenticationInfo
+//            , String userName, String hostName, int port) throws GFacException {
+//        ServerInfo serverInfo = new ServerInfo(userName, hostName);
+//        String key = userName+hostName+port;
+//        SSHAuthWrapper sshAuthWrapper = new SSHAuthWrapper(serverInfo, authenticationInfo, key);
+//        if (jobExecutionContext.getSecurityContext(key) == null) {
+//            try {
+//                GFACSSHUtils.addSecurityContext(jobExecutionContext, sshAuthWrapper);
+//            } catch (ApplicationSettingsException e) {
+//                logger.error(e.getMessage());
+//                try {
+//                    StringWriter errors = new StringWriter();
+//                    e.printStackTrace(new PrintWriter(errors));
+//                    GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//                } catch (GFacException e1) {
+//                    logger.error(e1.getLocalizedMessage());
+//                }
+//                throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//            }
+//        }
+//        return key;
+//    }
+//}
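
For reference, parseCommand in the block above substitutes the $workingDir, $inputDir and
$outputDir placeholders into pre- and post-job commands. The same substitution can be
exercised standalone; the class and sample values below are invented for the sketch and are
not part of Airavata:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class CommandTemplateSketch {
        // Replaces each "$name" placeholder with its value using literal
        // (non-regex) matching, mirroring parseCommand above.
        static String substitute(String command, Map<String, String> vars) {
            String result = command;
            for (Map.Entry<String, String> e : vars.entrySet()) {
                result = result.replace("$" + e.getKey(), e.getValue());
            }
            return result;
        }

        public static void main(String[] args) {
            Map<String, String> vars = new LinkedHashMap<String, String>();
            vars.put("workingDir", "/scratch/job42");
            vars.put("inputDir", "/scratch/job42/input");
            vars.put("outputDir", "/scratch/job42/output");
            // prints: cp /scratch/job42/input/in.dat /scratch/job42
            System.out.println(substitute("cp $inputDir/in.dat $workingDir", vars));
        }
    }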

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
index 890b858..cadb251 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
@@ -1,96 +1,96 @@
-package org.apache.airavata.gfac.ssh.util;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * To handle outputs of different data types
- * 
- */
-public class HandleOutputs {
-	private static final Logger log = LoggerFactory.getLogger(HandleOutputs.class);
-
-	public static List<OutputDataObjectType> handleOutputs(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) throws GFacHandlerException {
-		List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-		try {
-			String outputDataDir = File.separator + "tmp" + File.separator + jobExecutionContext.getExperimentID();
-			(new File(outputDataDir)).mkdirs();
-
-			List<OutputDataObjectType> outputs = jobExecutionContext.getTaskData().getApplicationOutputs();
-			List<String> outputList = remoteCluster.listDirectory(jobExecutionContext.getWorkingDir());
-			boolean missingOutput = false;
-
-			for (OutputDataObjectType output : outputs) {
-				// FIXME: Validation of outputs based on required and optional and search based on REGEX provided in search.
-
-				if (DataType.URI == output.getType()) {
-                    // for failed jobs outputs are not generated. So we should not download outputs
-                    if (GFacUtils.isFailedJob(jobExecutionContext)){
-                       continue;
-                    }
-					String outputFile = output.getValue();
-					String fileName = outputFile.substring(outputFile.lastIndexOf(File.separatorChar) + 1, outputFile.length());
-
-					if (output.getLocation() == null && !outputList.contains(fileName) && output.isIsRequired()) {
-						missingOutput = true;
-					} else {
-						remoteCluster.scpFrom(outputFile, outputDataDir);
-						String localFile = outputDataDir + File.separator + fileName;
-						jobExecutionContext.addOutputFile(localFile);
-						output.setValue(localFile);
-						outputArray.add(output);
-					}
-
-				} else if (DataType.STDOUT == output.getType()) {
-					String downloadFile = jobExecutionContext.getStandardOutput();
-					String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-					remoteCluster.scpFrom(downloadFile, outputDataDir);
-					String localFile = outputDataDir + File.separator + fileName;
-					jobExecutionContext.addOutputFile(localFile);
-					jobExecutionContext.setStandardOutput(localFile);
-					output.setValue(localFile);
-					outputArray.add(output);
-
-				} else if (DataType.STDERR == output.getType()) {
-					String downloadFile = jobExecutionContext.getStandardError();
-					String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-					remoteCluster.scpFrom(downloadFile, outputDataDir);
-					String localFile = outputDataDir + File.separator + fileName;
-					jobExecutionContext.addOutputFile(localFile);
-					jobExecutionContext.setStandardError(localFile);
-					output.setValue(localFile);
-					outputArray.add(output);
-
-				}
-			}
-			if (outputArray == null || outputArray.isEmpty()) {
-				log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
-				if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null) {
-					throw new GFacHandlerException("Empty Output returned from the Application, Double check the application"
-							+ "and ApplicationDescriptor output Parameter Names");
-				}
-			}
-
-			if (missingOutput) {
-				String arrayString = Arrays.deepToString(outputArray.toArray());
-				log.error(arrayString);
-				throw new GFacHandlerException("Required output is missing");
-			}
-		} catch (Exception e) {
-			throw new GFacHandlerException(e);
-		}
-		jobExecutionContext.getTaskData().setApplicationOutputs(outputArray);
-		return outputArray;
-	}
-}
+//package org.apache.airavata.gfac.ssh.util;
+//
+//import java.io.File;
+//import java.util.ArrayList;
+//import java.util.Arrays;
+//import java.util.List;
+//
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+///**
+// * To handle outputs of different data types
+// *
+// */
+//public class HandleOutputs {
+//	private static final Logger log = LoggerFactory.getLogger(HandleOutputs.class);
+//
+//	public static List<OutputDataObjectType> handleOutputs(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) throws GFacHandlerException {
+//		List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
+//		try {
+//			String outputDataDir = File.separator + "tmp" + File.separator + jobExecutionContext.getExperimentID();
+//			(new File(outputDataDir)).mkdirs();
+//
+//			List<OutputDataObjectType> outputs = jobExecutionContext.getTaskData().getApplicationOutputs();
+//			List<String> outputList = remoteCluster.listDirectory(jobExecutionContext.getWorkingDir());
+//			boolean missingOutput = false;
+//
+//			for (OutputDataObjectType output : outputs) {
+//				// FIXME: Validation of outputs based on required and optional and search based on REGEX provided in search.
+//
+//				if (DataType.URI == output.getType()) {
+//                    // for failed jobs outputs are not generated. So we should not download outputs
+//                    if (GFacUtils.isFailedJob(jobExecutionContext)){
+//                       continue;
+//                    }
+//					String outputFile = output.getValue();
+//					String fileName = outputFile.substring(outputFile.lastIndexOf(File.separatorChar) + 1, outputFile.length());
+//
+//					if (output.getLocation() == null && !outputList.contains(fileName) && output.isIsRequired()) {
+//						missingOutput = true;
+//					} else {
+//						remoteCluster.scpFrom(outputFile, outputDataDir);
+//						String localFile = outputDataDir + File.separator + fileName;
+//						jobExecutionContext.addOutputFile(localFile);
+//						output.setValue(localFile);
+//						outputArray.add(output);
+//					}
+//
+//				} else if (DataType.STDOUT == output.getType()) {
+//					String downloadFile = jobExecutionContext.getStandardOutput();
+//					String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
+//					remoteCluster.scpFrom(downloadFile, outputDataDir);
+//					String localFile = outputDataDir + File.separator + fileName;
+//					jobExecutionContext.addOutputFile(localFile);
+//					jobExecutionContext.setStandardOutput(localFile);
+//					output.setValue(localFile);
+//					outputArray.add(output);
+//
+//				} else if (DataType.STDERR == output.getType()) {
+//					String downloadFile = jobExecutionContext.getStandardError();
+//					String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
+//					remoteCluster.scpFrom(downloadFile, outputDataDir);
+//					String localFile = outputDataDir + File.separator + fileName;
+//					jobExecutionContext.addOutputFile(localFile);
+//					jobExecutionContext.setStandardError(localFile);
+//					output.setValue(localFile);
+//					outputArray.add(output);
+//
+//				}
+//			}
+//			if (outputArray.isEmpty()) {
+//				log.error("Empty output returned from the application; double-check the application and ApplicationDescriptor output parameter names");
+//				if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null) {
+//					throw new GFacHandlerException("Empty output returned from the application;"
+//							+ " double-check the application and ApplicationDescriptor output parameter names");
+//				}
+//			}
+//
+//			if (missingOutput) {
+//				String arrayString = Arrays.deepToString(outputArray.toArray());
+//				log.error("Required output is missing; outputs collected so far: " + arrayString);
+//				throw new GFacHandlerException("Required output is missing");
+//			}
+//		} catch (Exception e) {
+//			throw new GFacHandlerException(e);
+//		}
+//		jobExecutionContext.getTaskData().setApplicationOutputs(outputArray);
+//		return outputArray;
+//	}
+//}
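
Each branch of handleOutputs above derives a local file name with the same
substring(lastIndexOf(File.separatorChar) + 1) idiom. A self-contained equivalent is below;
note that File.separatorChar is the local separator, so the idiom assumes the remote path
convention matches the machine running GFac:

    import java.io.File;

    public class FileNameSketch {
        // Returns the final path segment, exactly like the substring calls above.
        static String fileNameOf(String path) {
            return path.substring(path.lastIndexOf(File.separatorChar) + 1);
        }

        public static void main(String[] args) {
            // prints "stdout.txt" on platforms whose separator is '/'
            System.out.println(fileNameOf("/tmp/exp123/stdout.txt"));
        }
    }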

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-service/src/main/java/org/apache/airavata/gfac/server/GfacServer.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-service/src/main/java/org/apache/airavata/gfac/server/GfacServer.java b/modules/gfac/gfac-service/src/main/java/org/apache/airavata/gfac/server/GfacServer.java
index b076145..53e687a 100644
--- a/modules/gfac/gfac-service/src/main/java/org/apache/airavata/gfac/server/GfacServer.java
+++ b/modules/gfac/gfac-service/src/main/java/org/apache/airavata/gfac/server/GfacServer.java
@@ -52,8 +52,8 @@ public class GfacServer implements IServer{
     public void StartGfacServer(GfacService.Processor<GfacServerHandler> gfacServerHandlerProcessor)
             throws Exception {
         try {
-            final int serverPort = Integer.parseInt(ServerSettings.getSetting(Constants.GFAC_SERVER_PORT, "8950"));
-            final String serverHost = ServerSettings.getSetting(Constants.GFAC_SERVER_HOST, null);
+            final int serverPort = Integer.parseInt(ServerSettings.getGFacServerPort());
+            final String serverHost = ServerSettings.getGfacServerHost();
 
             InetSocketAddress inetSocketAddress = new InetSocketAddress(serverHost, serverPort);
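
The hunk above replaces raw key lookups with typed accessors on ServerSettings. A plausible
shape for such an accessor, assuming it simply wraps the same key-and-default lookup the
removed lines performed (a sketch, not the actual ServerSettings source):

    // inside ServerSettings (hypothetical body)
    public static String getGFacServerPort() {
        // same key and default the call site used before this change
        return getSetting(Constants.GFAC_SERVER_PORT, "8950");
    }

Centralizing the key and the default behind one accessor keeps call sites from drifting out
of sync with each other.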
 


[3/5] airavata git commit: fixing compilation issues

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
index 55bb987..175351f 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
@@ -1,225 +1,225 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.handler;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.*;
-
-/**
- * This handler will copy outputs from airavata installed local directory
- * to a remote location, prior to this handler SCPOutputHandler should be invoked
- * Should add following configuration to gfac-config.xml and configure the keys properly
- * <Handler class="AdvancedSCPOutputHandler">
-                            <property name="privateKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa"/>
-                            <property name="publicKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa.pub"/>
-                        <property name="userName" value="airavata"/>
-                        <property name="hostName" value="gw98.iu.xsede.org"/>
-                        <property name="outputPath" value="/home/airavata/outputData"/>
-                        <property name="passPhrase" value="/home/airavata/outputData"/>
-                        <property name="password" value="/home/airavata/outputData"/>
-
- */
-public class AdvancedSCPOutputHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(AdvancedSCPOutputHandler.class);
-
-    public static final int DEFAULT_SSH_PORT = 22;
-
-    private String password = null;
-
-    private String publicKeyPath;
-
-    private String passPhrase;
-
-    private String privateKeyPath;
-
-    private String userName;
-
-    private String hostName;
-
-    private String outputPath;
-
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-        password = (String)properties.get("password");
-        passPhrase = (String)properties.get("passPhrase");
-        privateKeyPath = (String)properties.get("privateKeyPath");
-        publicKeyPath = (String)properties.get("publicKeyPath");
-        userName = (String)properties.get("userName");
-        hostName = (String)properties.get("hostName");
-        outputPath = (String)properties.get("outputPath");
-    }
-
-    @Override
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-    	RemoteCluster remoteCluster = null;
-        AuthenticationInfo authenticationInfo = null;
-        if (password != null) {
-            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
-        } else {
-            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
-                    this.passPhrase);
-        }
-        try {
-            String hostName = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostName) == null) {
-                try {
-                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
-                } catch (ApplicationSettingsException e) {
-                    log.error(e.getMessage());
-                    try {
-                        StringWriter errors = new StringWriter();
-                        e.printStackTrace(new PrintWriter(errors));
-         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-         			} catch (GFacException e1) {
-         				 log.error(e1.getLocalizedMessage());
-         			}
-                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-                }
-            }
-            String standardError = jobExecutionContext.getStandardError();
-            String standardOutput = jobExecutionContext.getStandardOutput();
-            super.invoke(jobExecutionContext);
-            // Server info
-            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir() != null){
-                try{
-                    URL outputPathURL = new URL(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir());
-                    this.userName = outputPathURL.getUserInfo();
-                    this.hostName = outputPathURL.getHost();
-                    outputPath = outputPathURL.getPath();
-                } catch (MalformedURLException e) {
-                    log.error(e.getLocalizedMessage(),e);
-                }
-            }
-            String key = GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
-            remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
-            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && !jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().isPersistOutputData()){
-            outputPath = outputPath + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID()
-                    + File.separator;
-                remoteCluster.makeDirectory(outputPath);
-            }
-            remoteCluster.scpTo(outputPath, standardError);
-            remoteCluster.scpTo(outputPath, standardOutput);
-            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-            Set<String> keys = output.keySet();
-            for (String paramName : keys) {
-                OutputDataObjectType outputDataObjectType = (OutputDataObjectType) output.get(paramName);
-                if (outputDataObjectType.getType() == DataType.URI) {
-                    // for failed jobs outputs are not generated. So we should not download outputs
-                    if (GFacUtils.isFailedJob(jobExecutionContext)){
-                        continue;
-                    }
-                	String downloadFile = outputDataObjectType.getValue();
-                    if(downloadFile == null || !(new File(downloadFile).isFile())){
-                        GFacUtils.saveErrorDetails(jobExecutionContext, "Empty Output returned from the application", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                		throw new GFacHandlerException("Empty Output returned from the application.." );
-                	}
-                	remoteCluster.scpTo(outputPath, downloadFile);
-                    String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
-                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-                    dataObjectType.setName(paramName);
-                    dataObjectType.setType(DataType.URI);
-                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-                    outputArray.add(dataObjectType);
-                }else if (outputDataObjectType.getType() == DataType.STDOUT) {
-                    remoteCluster.scpTo(outputPath, standardOutput);
-                    String fileName = standardOutput.substring(standardOutput.lastIndexOf(File.separatorChar)+1, standardOutput.length());
-                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-                    dataObjectType.setName(paramName);
-                    dataObjectType.setType(DataType.STDOUT);
-                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-                    outputArray.add(dataObjectType);
-                }else if (outputDataObjectType.getType() == DataType.STDERR) {
-                    remoteCluster.scpTo(outputPath, standardError);
-                    String fileName = standardError.substring(standardError.lastIndexOf(File.separatorChar)+1, standardError.length());
-                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-                    dataObjectType.setName(paramName);
-                    dataObjectType.setType(DataType.STDERR);
-                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-                    outputArray.add(dataObjectType);
-                }
-             }
-           experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-        } catch (SSHApiException e) {
-            try {
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-			} catch (GFacException e1) {
-				 log.error(e1.getLocalizedMessage());
-			}
-            log.error("Error transfering files to remote host : " + hostName + " with the user: " + userName);
-            log.error(e.getMessage());
-            throw new GFacHandlerException(e);
-        } catch (Exception e) {
-        	 try {
- 				GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
- 			} catch (GFacException e1) {
- 				 log.error(e1.getLocalizedMessage());
- 			}
-        	throw new GFacHandlerException(e);
-        }
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import org.apache.airavata.common.exception.ApplicationSettingsException;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.SSHApiException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
+//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+//import org.apache.airavata.model.experiment.CorrectiveAction;
+//import org.apache.airavata.model.experiment.ErrorCategory;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.net.MalformedURLException;
+//import java.net.URL;
+//import java.util.*;
+//
+///**
+// * This handler copies outputs from the local directory of the Airavata installation
+// * to a remote location. SCPOutputHandler should be invoked before this handler.
+// * Add the following configuration to gfac-config.xml and configure the keys properly:
+// * <Handler class="AdvancedSCPOutputHandler">
+//                            <property name="privateKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa"/>
+//                            <property name="publicKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa.pub"/>
+//                        <property name="userName" value="airavata"/>
+//                        <property name="hostName" value="gw98.iu.xsede.org"/>
+//                        <property name="outputPath" value="/home/airavata/outputData"/>
+//                        <property name="passPhrase" value="yourPassPhrase"/>
+//                        <property name="password" value="yourPassword"/>
+//
+// */
+//public class AdvancedSCPOutputHandler extends AbstractHandler {
+//    private static final Logger log = LoggerFactory.getLogger(AdvancedSCPOutputHandler.class);
+//
+//    public static final int DEFAULT_SSH_PORT = 22;
+//
+//    private String password = null;
+//
+//    private String publicKeyPath;
+//
+//    private String passPhrase;
+//
+//    private String privateKeyPath;
+//
+//    private String userName;
+//
+//    private String hostName;
+//
+//    private String outputPath;
+//
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//        password = (String)properties.get("password");
+//        passPhrase = (String)properties.get("passPhrase");
+//        privateKeyPath = (String)properties.get("privateKeyPath");
+//        publicKeyPath = (String)properties.get("publicKeyPath");
+//        userName = (String)properties.get("userName");
+//        hostName = (String)properties.get("hostName");
+//        outputPath = (String)properties.get("outputPath");
+//    }
+//
+//    @Override
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//    	RemoteCluster remoteCluster = null;
+//        AuthenticationInfo authenticationInfo = null;
+//        if (password != null) {
+//            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
+//        } else {
+//            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
+//                    this.passPhrase);
+//        }
+//        try {
+//            String hostName = jobExecutionContext.getHostName();
+//            if (jobExecutionContext.getSecurityContext(hostName) == null) {
+//                try {
+//                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//                } catch (ApplicationSettingsException e) {
+//                    log.error(e.getMessage());
+//                    try {
+//                        StringWriter errors = new StringWriter();
+//                        e.printStackTrace(new PrintWriter(errors));
+//         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//         			} catch (GFacException e1) {
+//         				 log.error(e1.getLocalizedMessage());
+//         			}
+//                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//                }
+//            }
+//            String standardError = jobExecutionContext.getStandardError();
+//            String standardOutput = jobExecutionContext.getStandardOutput();
+//            super.invoke(jobExecutionContext);
+//            // If an advanced output data directory URL is given, take user, host and path from it
+//            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir() != null){
+//                try{
+//                    URL outputPathURL = new URL(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir());
+//                    this.userName = outputPathURL.getUserInfo();
+//                    this.hostName = outputPathURL.getHost();
+//                    outputPath = outputPathURL.getPath();
+//                } catch (MalformedURLException e) {
+//                    log.error(e.getLocalizedMessage(),e);
+//                }
+//            }
+//            String key = GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
+//            remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
+//            if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && !jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().isPersistOutputData()) {
+//                outputPath = outputPath + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID()
+//                        + File.separator;
+//                remoteCluster.makeDirectory(outputPath);
+//            }
+//            remoteCluster.scpTo(outputPath, standardError);
+//            remoteCluster.scpTo(outputPath, standardOutput);
+//            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
+//            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
+//            Set<String> keys = output.keySet();
+//            for (String paramName : keys) {
+//                OutputDataObjectType outputDataObjectType = (OutputDataObjectType) output.get(paramName);
+//                if (outputDataObjectType.getType() == DataType.URI) {
+//                    // for failed jobs outputs are not generated. So we should not download outputs
+//                    if (GFacUtils.isFailedJob(jobExecutionContext)){
+//                        continue;
+//                    }
+//                	String downloadFile = outputDataObjectType.getValue();
+//                    if (downloadFile == null || !(new File(downloadFile).isFile())) {
+//                        GFacUtils.saveErrorDetails(jobExecutionContext, "Empty output returned from the application", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//                        throw new GFacHandlerException("Empty output returned from the application");
+//                    }
+//                	remoteCluster.scpTo(outputPath, downloadFile);
+//                    String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
+//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
+//                    dataObjectType.setName(paramName);
+//                    dataObjectType.setType(DataType.URI);
+//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
+//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
+//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
+//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
+//                    outputArray.add(dataObjectType);
+//                }else if (outputDataObjectType.getType() == DataType.STDOUT) {
+//                    remoteCluster.scpTo(outputPath, standardOutput);
+//                    String fileName = standardOutput.substring(standardOutput.lastIndexOf(File.separatorChar)+1, standardOutput.length());
+//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
+//                    dataObjectType.setName(paramName);
+//                    dataObjectType.setType(DataType.STDOUT);
+//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
+//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
+//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
+//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
+//                    outputArray.add(dataObjectType);
+//                }else if (outputDataObjectType.getType() == DataType.STDERR) {
+//                    remoteCluster.scpTo(outputPath, standardError);
+//                    String fileName = standardError.substring(standardError.lastIndexOf(File.separatorChar)+1, standardError.length());
+//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
+//                    dataObjectType.setName(paramName);
+//                    dataObjectType.setType(DataType.STDERR);
+//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
+//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
+//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
+//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
+//                    outputArray.add(dataObjectType);
+//                }
+//             }
+//           experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
+//        } catch (SSHApiException e) {
+//            try {
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//			} catch (GFacException e1) {
+//				 log.error(e1.getLocalizedMessage());
+//			}
+//            log.error("Error transfering files to remote host : " + hostName + " with the user: " + userName);
+//            log.error(e.getMessage());
+//            throw new GFacHandlerException(e);
+//        } catch (Exception e) {
+//        	 try {
+// 				GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+// 			} catch (GFacException e1) {
+// 				 log.error(e1.getLocalizedMessage());
+// 			}
+//        	throw new GFacHandlerException(e);
+//        }
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//
+//}
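
When advanced output data handling supplies an outputDataDir URL, the handler above extracts
the user, host and path from it via java.net.URL. The extraction can be tried standalone;
java.net.URI is used here only because java.net.URL rejects schemes without a registered
protocol handler, and the URL value itself is made up:

    import java.net.URI;

    public class OutputUrlSketch {
        public static void main(String[] args) {
            URI u = URI.create("scp://airavata@gw98.iu.xsede.org/home/airavata/outputData");
            System.out.println(u.getUserInfo()); // airavata
            System.out.println(u.getHost());     // gw98.iu.xsede.org
            System.out.println(u.getPath());     // /home/airavata/outputData
        }
    }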

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
index 254b028..5dc9f2a 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
@@ -1,78 +1,78 @@
-package org.apache.airavata.gfac.ssh.handler;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.gfac.ssh.util.HandleOutputs;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.apache.airavata.registry.cpi.RegistryException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class NewSSHOutputHandler extends AbstractHandler{
-
-	 private static final Logger log = LoggerFactory.getLogger(NewSSHOutputHandler.class);
-
-	    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-	        String hostAddress = jobExecutionContext.getHostName();
-	      	RemoteCluster remoteCluster = null;
-	      	// Security Context and connection
-	        try {
-	            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-	                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-	            }
-	            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-	            if (remoteCluster == null) {
-	                throw new GFacProviderException("Security context is not set properly");
-	            } else {
-	                log.info("Successfully retrieved the Security Context");
-	            }
-	        } catch (Exception e) {
-	            log.error(e.getMessage());
-	            try {
-                    StringWriter errors = new StringWriter();
-                    e.printStackTrace(new PrintWriter(errors));
-	                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-	            } catch (GFacException e1) {
-	                log.error(e1.getLocalizedMessage());
-	            }
-	            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-	        }
-
-	        super.invoke(jobExecutionContext);
-	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, remoteCluster);
-	        try {
-				experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-			} catch (RegistryException e) {
-				throw new GFacHandlerException(e);
-			}
-
-	       
-	    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    @Override
-	public void initProperties(Properties properties) throws GFacHandlerException {
-		// TODO Auto-generated method stub
-		
-	}
-
-}
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.util.List;
+//import java.util.Properties;
+//
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.provider.GFacProviderException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.gfac.ssh.util.HandleOutputs;
+//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+//import org.apache.airavata.model.experiment.CorrectiveAction;
+//import org.apache.airavata.model.experiment.ErrorCategory;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.apache.airavata.registry.cpi.RegistryException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//public class NewSSHOutputHandler extends AbstractHandler{
+//
+//	 private static final Logger log = LoggerFactory.getLogger(NewSSHOutputHandler.class);
+//
+//	    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//	        String hostAddress = jobExecutionContext.getHostName();
+//	      	RemoteCluster remoteCluster = null;
+//	      	// Security Context and connection
+//	        try {
+//	            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//	                GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//	            }
+//	            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//	            if (remoteCluster == null) {
+//	                throw new GFacProviderException("Security context is not set properly");
+//	            } else {
+//	                log.info("Successfully retrieved the Security Context");
+//	            }
+//	        } catch (Exception e) {
+//	            log.error(e.getMessage());
+//	            try {
+//                    StringWriter errors = new StringWriter();
+//                    e.printStackTrace(new PrintWriter(errors));
+//	                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//	            } catch (GFacException e1) {
+//	                log.error(e1.getLocalizedMessage());
+//	            }
+//	            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//	        }
+//
+//	        super.invoke(jobExecutionContext);
+//	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, remoteCluster);
+//	        try {
+//				experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
+//			} catch (RegistryException e) {
+//				throw new GFacHandlerException(e);
+//			}
+//
+//
+//	    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//    @Override
+//	public void initProperties(Properties properties) throws GFacHandlerException {
+//		// TODO Auto-generated method stub
+//
+//	}
+//
+//}
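
As NewSSHOutputHandler shows, invoke, recover and initProperties are the methods a GFac
handler overrides. A minimal no-op handler using only the signatures visible in this diff,
assuming AbstractHandler requires nothing beyond these three (imports are the same
org.apache.airavata.gfac.core.* classes plus java.util.Properties):

    public class NoOpHandler extends AbstractHandler {
        @Override
        public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
            super.invoke(jobExecutionContext); // keep the framework bookkeeping; add no behaviour
        }

        @Override
        public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
            // stateless handler: nothing to recover
        }

        @Override
        public void initProperties(Properties properties) throws GFacHandlerException {
            // no handler-specific properties
        }
    }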

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
index 1c7a7a6..d8afb06 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
@@ -1,119 +1,119 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.handler;
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.model.experiment.*;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.Properties;
-
-public class SSHDirectorySetupHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(SSHDirectorySetupHandler.class);
-
-	public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        try {
-            String hostAddress = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-            }
-        } catch (Exception e) {
-            log.error(e.getMessage());
-            try {
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
- 				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
- 			} catch (GFacException e1) {
- 				 log.error(e1.getLocalizedMessage());
- 			}
-            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-        } 
-
-        log.info("Setup SSH job directorties");
-        super.invoke(jobExecutionContext);
-        makeDirectory(jobExecutionContext);
-
-	}
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-		RemoteCluster remoteCluster = null;
-		try{
-            String hostAddress = jobExecutionContext.getHostName();
-            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-        if (remoteCluster == null) {
-            throw new GFacHandlerException("Security context is not set properly");
-        } else {
-            log.info("Successfully retrieved the Security Context");
-        }
-            String workingDirectory = jobExecutionContext.getWorkingDir();
-            remoteCluster.makeDirectory(workingDirectory);
-            if(!jobExecutionContext.getInputDir().equals(workingDirectory))
-            	remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
-            if(!jobExecutionContext.getOutputDir().equals(workingDirectory))
-            	remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
-            
-            DataTransferDetails detail = new DataTransferDetails();
-            TransferStatus status = new TransferStatus();
-            status.setTransferState(TransferState.DIRECTORY_SETUP);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("Working directory = " + workingDirectory);
-
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-        } catch (Exception e) {
-			DataTransferDetails detail = new DataTransferDetails();
-            TransferStatus status = new TransferStatus();
-            status.setTransferState(TransferState.FAILED);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("Working directory = " + jobExecutionContext.getWorkingDir());
-            try {
-                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-            } catch (Exception e1) {
-                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error executing the Handler: " + SSHDirectorySetupHandler.class, e);
-        }
-        
-	}
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.model.experiment.*;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.util.Properties;
+//
+//public class SSHDirectorySetupHandler extends AbstractHandler {
+//    private static final Logger log = LoggerFactory.getLogger(SSHDirectorySetupHandler.class);
+//
+//	public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        try {
+//            String hostAddress = jobExecutionContext.getHostName();
+//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//                GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//            }
+//        } catch (Exception e) {
+//            log.error(e.getMessage());
+//            try {
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+// 				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+// 			} catch (GFacException e1) {
+// 				 log.error(e1.getLocalizedMessage());
+// 			}
+//            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//        }
+//
+//        log.info("Setup SSH job directorties");
+//        super.invoke(jobExecutionContext);
+//        makeDirectory(jobExecutionContext);
+//
+//	}
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//    private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//		RemoteCluster remoteCluster = null;
+//		try{
+//            String hostAddress = jobExecutionContext.getHostName();
+//            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//        if (remoteCluster == null) {
+//            throw new GFacHandlerException("Security context is not set properly");
+//        } else {
+//            log.info("Successfully retrieved the Security Context");
+//        }
+//            String workingDirectory = jobExecutionContext.getWorkingDir();
+//            remoteCluster.makeDirectory(workingDirectory);
+//            if(!jobExecutionContext.getInputDir().equals(workingDirectory))
+//            	remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
+//            if(!jobExecutionContext.getOutputDir().equals(workingDirectory))
+//            	remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
+//
+//            DataTransferDetails detail = new DataTransferDetails();
+//            TransferStatus status = new TransferStatus();
+//            status.setTransferState(TransferState.DIRECTORY_SETUP);
+//            detail.setTransferStatus(status);
+//            detail.setTransferDescription("Working directory = " + workingDirectory);
+//
+//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//
+//        } catch (Exception e) {
+//			DataTransferDetails detail = new DataTransferDetails();
+//            TransferStatus status = new TransferStatus();
+//            status.setTransferState(TransferState.FAILED);
+//            detail.setTransferStatus(status);
+//            detail.setTransferDescription("Working directory = " + jobExecutionContext.getWorkingDir());
+//            try {
+//                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
+//            } catch (Exception e1) {
+//                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
+//            }
+//            throw new GFacHandlerException("Error executing the Handler: " + SSHDirectorySetupHandler.class, e);
+//        }
+//
+//	}
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//
+//    }
+//}
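
SSHDirectorySetupHandler above creates the working directory first, then the input and output directories only when they differ from it. A standalone sketch of that de-duplication logic, with a placeholder makeDirectory(String) standing in for RemoteCluster.makeDirectory:

    import java.util.LinkedHashSet;
    import java.util.Set;

    public class DirectorySetupSketch {
        // Collecting the paths in a set reproduces the handler's equals()
        // checks: a directory that coincides with the working directory is
        // created only once.
        static void setup(String workingDir, String inputDir, String outputDir) {
            Set<String> dirs = new LinkedHashSet<>();
            dirs.add(workingDir);
            dirs.add(inputDir);
            dirs.add(outputDir);
            for (String dir : dirs) {
                makeDirectory(dir);
            }
        }

        // Placeholder for remoteCluster.makeDirectory(path).
        static void makeDirectory(String path) {
            System.out.println("mkdir -p " + path);
        }

        public static void main(String[] args) {
            setup("/scratch/job1", "/scratch/job1", "/scratch/job1/out");
        }
    }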

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
index 86584ca..b1e485a 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
@@ -1,198 +1,198 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.handler;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.experiment.*;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-
-public class SSHInputHandler extends AbstractHandler {
-
-    private static final Logger log = LoggerFactory.getLogger(SSHInputHandler.class);
-
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        DataTransferDetails detail = new DataTransferDetails();
-        detail.setTransferDescription("Input Data Staging");
-        TransferStatus status = new TransferStatus();
-        int index = 0;
-        int oldIndex = 0;
-        List<String> oldFiles = new ArrayList<String>();
-        StringBuffer data = new StringBuffer("|");
-        MessageContext inputNew = new MessageContext();
-        RemoteCluster remoteCluster = null;
-        
-        try {
-            String hostAddress = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                try {
-                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
-                } catch (ApplicationSettingsException e) {
-                    log.error(e.getMessage());
-                    try {
-                        StringWriter errors = new StringWriter();
-                        e.printStackTrace(new PrintWriter(errors));
-         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-         			} catch (GFacException e1) {
-         				 log.error(e1.getLocalizedMessage());
-         			}
-                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-                }
-            }
-
-            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-            if (remoteCluster == null) {
-                throw new GFacException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-            log.info("Invoking SCPInputHandler");
-            super.invoke(jobExecutionContext);
-
-
-            MessageContext input = jobExecutionContext.getInMessageContext();
-            Set<String> parameters = input.getParameters().keySet();
-            for (String paramName : parameters) {
-                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
-                String paramValue = inputParamType.getValue();
-                //TODO: Review this with type
-                if (inputParamType.getType() == DataType.URI) {
-                    if (index < oldIndex) {
-                        log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-                        inputParamType.setValue(oldFiles.get(index));
-                        data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
-                    } else {
-                        String stageInputFile = stageInputFiles(remoteCluster, jobExecutionContext, paramValue);
-                        inputParamType.setValue(stageInputFile);
-                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
-                        status.setTransferState(TransferState.UPLOAD);
-                        detail.setTransferStatus(status);
-                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
-                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                    }
-                }// FIXME: what is the thrift model DataType equivalent for URIArray type?
-//                else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-//                	if (index < oldIndex) {
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import org.apache.airavata.common.exception.ApplicationSettingsException;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+//import org.apache.airavata.model.experiment.*;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.io.IOException;
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.util.ArrayList;
+//import java.util.List;
+//import java.util.Properties;
+//import java.util.Set;
+//
+//public class SSHInputHandler extends AbstractHandler {
+//
+//    private static final Logger log = LoggerFactory.getLogger(SSHInputHandler.class);
+//
+//
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        DataTransferDetails detail = new DataTransferDetails();
+//        detail.setTransferDescription("Input Data Staging");
+//        TransferStatus status = new TransferStatus();
+//        int index = 0;
+//        int oldIndex = 0;
+//        List<String> oldFiles = new ArrayList<String>();
+//        StringBuffer data = new StringBuffer("|");
+//        MessageContext inputNew = new MessageContext();
+//        RemoteCluster remoteCluster = null;
+//
+//        try {
+//            String hostAddress = jobExecutionContext.getHostName();
+//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//                try {
+//                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//                } catch (ApplicationSettingsException e) {
+//                    log.error(e.getMessage());
+//                    try {
+//                        StringWriter errors = new StringWriter();
+//                        e.printStackTrace(new PrintWriter(errors));
+//         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//         			} catch (GFacException e1) {
+//         				 log.error(e1.getLocalizedMessage());
+//         			}
+//                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//                }
+//            }
+//
+//            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//            if (remoteCluster == null) {
+//                throw new GFacException("Security context is not set properly");
+//            } else {
+//                log.info("Successfully retrieved the Security Context");
+//            }
+//            log.info("Invoking SCPInputHandler");
+//            super.invoke(jobExecutionContext);
+//
+//
+//            MessageContext input = jobExecutionContext.getInMessageContext();
+//            Set<String> parameters = input.getParameters().keySet();
+//            for (String paramName : parameters) {
+//                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
+//                String paramValue = inputParamType.getValue();
+//                //TODO: Review this with type
+//                if (inputParamType.getType() == DataType.URI) {
+//                    if (index < oldIndex) {
 //                        log.info("Input File: " + paramValue + " is already transferred, so we skip this operation !!!");
-//                        ((URIParameterType) actualParameter.getType()).setValue(oldFiles.get(index));
+//                        inputParamType.setValue(oldFiles.get(index));
 //                        data.append(oldFiles.get(index++)).append(","); // we get the already transferred file and increment the index
-//                    }else{
-//                	List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
-//                    List<String> newFiles = new ArrayList<String>();
-//                    for (String paramValueEach : split) {
-//                        String stageInputFiles = stageInputFiles(remoteCluster,jobExecutionContext, paramValueEach);
+//                    } else {
+//                        String stageInputFile = stageInputFiles(remoteCluster, jobExecutionContext, paramValue);
+//                        inputParamType.setValue(stageInputFile);
+//                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
 //                        status.setTransferState(TransferState.UPLOAD);
 //                        detail.setTransferStatus(status);
-//                        detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
-//                        registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//                        newFiles.add(stageInputFiles);
-//                        StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
-//                        GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-//                    }
-//                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+//                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
+//                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//
+//                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
 //                    }
-//                }
-                inputNew.getParameters().put(paramName, inputParamType);
-            }
-        } catch (Exception e) {
-            log.error(e.getMessage());
-            status.setTransferState(TransferState.FAILED);
-            detail.setTransferStatus(status);
-            try {
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-            } catch (Exception e1) {
-                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-        }
-        jobExecutionContext.setInMessageContext(inputNew);
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    private static String stageInputFiles(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
-        int i = paramValue.lastIndexOf(File.separator);
-        String substring = paramValue.substring(i + 1);
-        try {
-            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
-            if(paramValue.startsWith("scp:")){
-            	paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-            	remoteCluster.scpThirdParty(paramValue, targetFile);
-            }else{
-            if(paramValue.startsWith("file")){
-                paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-            }
-            boolean success = false;
-            int j = 1;
-            while(!success){
-            try {
-				remoteCluster.scpTo(targetFile, paramValue);
-				success = true;
-			} catch (Exception e) {
-				log.info(e.getLocalizedMessage());
-				Thread.sleep(2000);
-				 if(j==3) {
-					throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-				 }
-            }
-            j++;
-            }
-            }
-            return targetFile;
-        } catch (Exception e) {
-            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-        }
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
+//                }// FIXME: what is the thrift model DataType equivalent for URIArray type?
+////                else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
+////                	if (index < oldIndex) {
+////                        log.info("Input File: " + paramValue + " is already transferred, so we skip this operation !!!");
+////                        ((URIParameterType) actualParameter.getType()).setValue(oldFiles.get(index));
+////                        data.append(oldFiles.get(index++)).append(","); // we get the already transferred file and increment the index
+////                    }else{
+////                	List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
+////                    List<String> newFiles = new ArrayList<String>();
+////                    for (String paramValueEach : split) {
+////                        String stageInputFiles = stageInputFiles(remoteCluster,jobExecutionContext, paramValueEach);
+////                        status.setTransferState(TransferState.UPLOAD);
+////                        detail.setTransferStatus(status);
+////                        detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
+////                        registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+////                        newFiles.add(stageInputFiles);
+////                        StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
+////                        GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
+////                    }
+////                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+////                    }
+////                }
+//                inputNew.getParameters().put(paramName, inputParamType);
+//            }
+//        } catch (Exception e) {
+//            log.error(e.getMessage());
+//            status.setTransferState(TransferState.FAILED);
+//            detail.setTransferStatus(status);
+//            try {
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
+//                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//            } catch (Exception e1) {
+//                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
+//            }
+//            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
+//        }
+//        jobExecutionContext.setInMessageContext(inputNew);
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//    private static String stageInputFiles(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
+//        int i = paramValue.lastIndexOf(File.separator);
+//        String substring = paramValue.substring(i + 1);
+//        try {
+//            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
+//            if(paramValue.startsWith("scp:")){
+//            	paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
+//            	remoteCluster.scpThirdParty(paramValue, targetFile);
+//            }else{
+//            if(paramValue.startsWith("file")){
+//                paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
+//            }
+//            boolean success = false;
+//            int j = 1;
+//            while(!success){
+//            try {
+//				remoteCluster.scpTo(targetFile, paramValue);
+//				success = true;
+//			} catch (Exception e) {
+//				log.info(e.getLocalizedMessage());
+//				Thread.sleep(2000);
+//				 if(j==3) {
+//					throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
+//				 }
+//            }
+//            j++;
+//            }
+//            }
+//            return targetFile;
+//        } catch (Exception e) {
+//            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
+//        }
+//    }
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//
+//    }
+//}
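
The stageInputFiles method above wraps remoteCluster.scpTo(...) in a retry loop: up to three attempts with a two-second pause between failures, rethrowing the last error. A cleaned-up, generic sketch of that pattern (the Action interface is illustrative; the real code calls scpTo directly):

    public class RetrySketch {
        interface Action { void run() throws Exception; }

        // Runs the action up to maxAttempts times, pausing between failed
        // attempts and rethrowing the last exception, mirroring the loop in
        // stageInputFiles (3 attempts, 2000 ms).
        static void retry(Action action, int maxAttempts, long sleepMillis) throws Exception {
            for (int attempt = 1; ; attempt++) {
                try {
                    action.run();
                    return;
                } catch (Exception e) {
                    if (attempt == maxAttempts) {
                        throw e;
                    }
                    Thread.sleep(sleepMillis);
                }
            }
        }

        public static void main(String[] args) throws Exception {
            int[] calls = {0};
            retry(() -> {
                if (++calls[0] < 3) {
                    throw new RuntimeException("transient failure " + calls[0]);
                }
                System.out.println("succeeded on attempt " + calls[0]);
            }, 3, 2000);
        }
    }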


[5/5] airavata git commit: fixing compilation issues

Posted by ch...@apache.org.
fixing compilation issues


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/df3fbe6a
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/df3fbe6a
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/df3fbe6a

Branch: refs/heads/master
Commit: df3fbe6aa3d8f477bab3275e620d77bfb924bae0
Parents: a2dce4f
Author: Chathuri Wimalasena <ch...@apache.org>
Authored: Wed Jun 24 10:14:54 2015 -0400
Committer: Chathuri Wimalasena <ch...@apache.org>
Committed: Wed Jun 24 10:14:54 2015 -0400

----------------------------------------------------------------------
 .../gaussian/handler/GaussianHandler.java       |  326 ++---
 .../org/apache/airavata/gfac/core/GFac.java     |   10 +-
 .../apache/airavata/gfac/core/GFacUtils.java    |   39 +-
 .../SSHPublicKeyAuthentication.java             |    2 +-
 .../SSHPublicKeyFileAuthentication.java         |    2 +-
 .../gfac/core/context/JobExecutionContext.java  |  988 +++++++--------
 .../airavata/gfac/core/monitor/MonitorID.java   |   30 +-
 .../java/com/jcraft/jsch/GSSContextX509.java    |  205 ++++
 .../UserAuthGSSAPIWithMICGSSCredentials.java    |    1 -
 .../gfac/impl/task/SSHJobSubmissionTask.java    |   26 +-
 .../handler/LocalDirectorySetupHandler.java     |  124 +-
 .../gfac/local/handler/LocalInputHandler.java   |  184 +--
 .../gfac/local/provider/impl/LocalProvider.java |  618 +++++-----
 .../gfac/local/utils/LocalProviderUtil.java     |  102 +-
 .../ssh/handler/AdvancedSCPInputHandler.java    |  454 +++----
 .../ssh/handler/AdvancedSCPOutputHandler.java   |  450 +++----
 .../gfac/ssh/handler/NewSSHOutputHandler.java   |  156 +--
 .../ssh/handler/SSHDirectorySetupHandler.java   |  238 ++--
 .../gfac/ssh/handler/SSHInputHandler.java       |  386 +++---
 .../gfac/ssh/handler/SSHOutputHandler.java      |  510 ++++----
 .../gfac/ssh/provider/impl/SSHProvider.java     |  946 +++++++--------
 .../airavata/gfac/ssh/util/GFACSSHUtils.java    | 1124 +++++++++---------
 .../airavata/gfac/ssh/util/HandleOutputs.java   |  192 +--
 .../apache/airavata/gfac/server/GfacServer.java |    4 +-
 24 files changed, 3649 insertions(+), 3468 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-application-specific-handlers/src/main/java/org/apache/airavata/application/gaussian/handler/GaussianHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-application-specific-handlers/src/main/java/org/apache/airavata/application/gaussian/handler/GaussianHandler.java b/modules/gfac/gfac-application-specific-handlers/src/main/java/org/apache/airavata/application/gaussian/handler/GaussianHandler.java
index 074786a..2481ec9 100644
--- a/modules/gfac/gfac-application-specific-handlers/src/main/java/org/apache/airavata/application/gaussian/handler/GaussianHandler.java
+++ b/modules/gfac/gfac-application-specific-handlers/src/main/java/org/apache/airavata/application/gaussian/handler/GaussianHandler.java
@@ -1,163 +1,163 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.application.gaussian.handler;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
-import org.apache.airavata.registry.cpi.RegistryException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-public class GaussianHandler extends AbstractHandler {
-
-    private static final Logger logger = LoggerFactory.getLogger(GaussianHandler.class);
-    public static final String LINK_SECTION = "%";
-    public static final String ROUTE_SECTION = "#";
-    public static final String PROC_SHARED = "procshared";
-    public static final String MEM = "mem";
-    public static final String CHK = "chk";
-    public static final String PROC = "proc";
-
-    public static final String EQUAL = "=";
-    public static final String OPEN_PARENTHESES = "(";
-    public static final String CLOSE_PARENTHESES = ")";
-
-    private String mainInputFilePath;
-    @Override
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        super.invoke(jobExecutionContext);
-        Map<String, String> configurations;
-        Map<String, Object> parameters = jobExecutionContext.getInMessageContext().getParameters();
-        // only get first input file, which is the main input file
-        for (Object paraValue : parameters.values()) {
-            if (paraValue instanceof InputDataObjectType) {
-                InputDataObjectType inputDataObjectType = (InputDataObjectType) paraValue;
-                mainInputFilePath = inputDataObjectType.getValue();
-                break;
-            }
-        }
-        if (mainInputFilePath != null) {
-            File mainInputFile = new File(mainInputFilePath);
-            if (mainInputFile.exists()) {
-                try {
-                    configurations = parseGaussianInputFile(mainInputFile);
-                    ComputationalResourceScheduling taskScheduling = jobExecutionContext.getTaskData().getTaskScheduling();
-                    for (Map.Entry<String, String> inputConfig : configurations.entrySet()) {
-                        if (inputConfig.getKey().equals(PROC_SHARED)) {
-                            taskScheduling.setTotalCPUCount(Integer.parseInt(inputConfig.getValue()));
-                        } else if (inputConfig.getKey().equals(MEM)) {
-                            int userRequestedMem = Integer.parseInt(inputConfig.getValue());
-                            int additionalMem = (int) (userRequestedMem * 0.2);
-                            // TODO check (userRequestedMem + additionalMem)  > maxNode or Queue allowed Mem
-                            taskScheduling.setTotalPhysicalMemory(userRequestedMem + additionalMem);
-                        } else if (inputConfig.getKey().equals(PROC)) {
-                            taskScheduling.setTotalCPUCount(Integer.parseInt(inputConfig.getValue()));
-                        } else {
-                            // TODO - handle other input configurations
-                        }
-                        logger.info("$$$$$$$$ " + inputConfig.getKey() + " --> " + inputConfig.getValue() + " $$$$$$$$$$$");
-                    }
-                    experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, jobExecutionContext.getTaskData(), jobExecutionContext.getTaskData().getTaskID());
-                } catch (IOException e) {
-                    throw new GFacHandlerException("Error while reading main input file ", e);
-                } catch (RegistryException e) {
-                    throw new GFacHandlerException("Error while updating task details", e);
-                }
-            } else {
-                throw new GFacHandlerException("Main input file doesn't exists " + mainInputFilePath);
-            }
-
-        } else {
-            throw new GFacHandlerException("Main input file path shouldn't be null");
-        }
-
-    }
-
-    /*   %procshared=6 goes into the map as key:procshared, value:6. Accepted keyword forms:
-       keyword = option
-       keyword(option)
-       keyword=(option1, option2, …)
-       keyword(option1, option2, …)*/
-    // TODO - make this method private
-    public Map<String, String> parseGaussianInputFile(File mainInputFile) throws IOException {
-        Map<String, String> configs = new HashMap<String, String>();
-        BufferedReader br = new BufferedReader(new FileReader(mainInputFile));
-        String line = br.readLine();
-        while (line != null) {
-            line = line.trim();
-            String keyword = null;
-            String withoutKeyword = null;
-            String option = null;
-            if (line.startsWith(LINK_SECTION)) {
-                int equalIndex = line.indexOf(EQUAL);
-                int openParenthesesIndex = line.indexOf(OPEN_PARENTHESES);
-                // read the keyword
-                if (equalIndex > 0) {
-                    keyword = line.substring(1, equalIndex).trim();
-                    withoutKeyword = line.substring(equalIndex + 1, line.length()); // remove up to = sign
-                } else if (openParenthesesIndex > 0) {
-                    keyword = line.substring(1, openParenthesesIndex).trim();
-                    withoutKeyword = line.substring(openParenthesesIndex, line.length()); // remove left side of ( sign
-                } else {
-                    // TODO - malformed input configuration
-                }
-                // read the option
-                if (openParenthesesIndex > 0) {
-                    if (withoutKeyword.endsWith(CLOSE_PARENTHESES)) {
-                        option = withoutKeyword.substring(1, withoutKeyword.length() - 1);
-                    } else {
-                        //TODO -  malformed input configuration
-                    }
-                } else {
-                    option = withoutKeyword.trim();
-                }
-                configs.put(keyword, option);
-            } else if (line.startsWith(ROUTE_SECTION)) {
-                // parse the line
-            }
-            line = br.readLine();
-        }
-        return configs;
-    }
-
-    @Override
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // Implement this method if we need safe recover steps before rerun the task.
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.application.gaussian.handler;
+//
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+//import org.apache.airavata.model.experiment.ComputationalResourceScheduling;
+//import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
+//import org.apache.airavata.registry.cpi.RegistryException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.BufferedReader;
+//import java.io.File;
+//import java.io.FileReader;
+//import java.io.IOException;
+//import java.util.HashMap;
+//import java.util.Map;
+//import java.util.Properties;
+//
+//public class GaussianHandler extends AbstractHandler {
+//
+//    private static final Logger logger = LoggerFactory.getLogger(GaussianHandler.class);
+//    public static final String LINK_SECTION = "%";
+//    public static final String ROUTE_SECTION = "#";
+//    public static final String PROC_SHARED = "procshared";
+//    public static final String MEM = "mem";
+//    public static final String CHK = "chk";
+//    public static final String PROC = "proc";
+//
+//    public static final String EQUAL = "=";
+//    public static final String OPEN_PARENTHESES = "(";
+//    public static final String CLOSE_PARENTHESES = ")";
+//
+//    private String mainInputFilePath;
+//    @Override
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        super.invoke(jobExecutionContext);
+//        Map<String, String> configurations;
+//        Map<String, Object> parameters = jobExecutionContext.getInMessageContext().getParameters();
+//        // only get first input file, which is the main input file
+//        for (Object paraValue : parameters.values()) {
+//            if (paraValue instanceof InputDataObjectType) {
+//                InputDataObjectType inputDataObjectType = (InputDataObjectType) paraValue;
+//                mainInputFilePath = inputDataObjectType.getValue();
+//                break;
+//            }
+//        }
+//        if (mainInputFilePath != null) {
+//            File mainInputFile = new File(mainInputFilePath);
+//            if (mainInputFile.exists()) {
+//                try {
+//                    configurations = parseGaussianInputFile(mainInputFile);
+//                    ComputationalResourceScheduling taskScheduling = jobExecutionContext.getTaskData().getTaskScheduling();
+//                    for (Map.Entry<String, String> inputConfig : configurations.entrySet()) {
+//                        if (inputConfig.getKey().equals(PROC_SHARED)) {
+//                            taskScheduling.setTotalCPUCount(Integer.parseInt(inputConfig.getValue()));
+//                        } else if (inputConfig.getKey().equals(MEM)) {
+//                            int userRequestedMem = Integer.parseInt(inputConfig.getValue());
+//                            int additionalMem = (int) (userRequestedMem * 0.2);
+//                            // TODO check (userRequestedMem + additionalMem)  > maxNode or Queue allowed Mem
+//                            taskScheduling.setTotalPhysicalMemory(userRequestedMem + additionalMem);
+//                        } else if (inputConfig.getKey().equals(PROC)) {
+//                            taskScheduling.setTotalCPUCount(Integer.parseInt(inputConfig.getValue()));
+//                        } else {
+//                            // TODO - handle other input configurations
+//                        }
+//                        logger.info("$$$$$$$$ " + inputConfig.getKey() + " --> " + inputConfig.getValue() + " $$$$$$$$$$$");
+//                    }
+//                    experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, jobExecutionContext.getTaskData(), jobExecutionContext.getTaskData().getTaskID());
+//                } catch (IOException e) {
+//                    throw new GFacHandlerException("Error while reading main input file ", e);
+//                } catch (RegistryException e) {
+//                    throw new GFacHandlerException("Error while updating task details", e);
+//                }
+//            } else {
+//                throw new GFacHandlerException("Main input file doesn't exists " + mainInputFilePath);
+//            }
+//
+//        } else {
+//            throw new GFacHandlerException("Main input file path shouldn't be null");
+//        }
+//
+//    }
+//
+//    /*   %procshared=6 goes into the map as key:procshared, value:6. Accepted keyword forms:
+//       keyword = option
+//       keyword(option)
+//       keyword=(option1, option2, …)
+//       keyword(option1, option2, …)*/
+//    // TODO - make this method private
+//    public Map<String, String> parseGaussianInputFile(File mainInputFile) throws IOException {
+//        Map<String, String> configs = new HashMap<String, String>();
+//        BufferedReader br = new BufferedReader(new FileReader(mainInputFile));
+//        String line = br.readLine();
+//        while (line != null) {
+//            line = line.trim();
+//            String keyword = null;
+//            String withoutKeyword = null;
+//            String option = null;
+//            if (line.startsWith(LINK_SECTION)) {
+//                int equalIndex = line.indexOf(EQUAL);
+//                int openParenthesesIndex = line.indexOf(OPEN_PARENTHESES);
+//                // read the keyword
+//                if (equalIndex > 0) {
+//                    keyword = line.substring(1, equalIndex).trim();
+//                    withoutKeyword = line.substring(equalIndex + 1, line.length()); // remove up to = sign
+//                } else if (openParenthesesIndex > 0) {
+//                    keyword = line.substring(1, openParenthesesIndex).trim();
+//                    withoutKeyword = line.substring(openParenthesesIndex, line.length()); // remove left side of ( sign
+//                } else {
+//                    // TODO - malformed input configuration
+//                }
+//                // read the option
+//                if (openParenthesesIndex > 0) {
+//                    if (withoutKeyword.endsWith(CLOSE_PARENTHESES)) {
+//                        option = withoutKeyword.substring(1, withoutKeyword.length() - 1);
+//                    } else {
+//                        //TODO -  malformed input configuration
+//                    }
+//                } else {
+//                    option = withoutKeyword.trim();
+//                }
+//                configs.put(keyword, option);
+//            } else if (line.startsWith(ROUTE_SECTION)) {
+//                // parse the line
+//            }
+//            line = br.readLine();
+//        }
+//        return configs;
+//    }
+//
+//    @Override
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // Implement this method if we need safe recover steps before rerun the task.
+//    }
+//}
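
GaussianHandler's parseGaussianInputFile above reads the Gaussian Link 0 ("%") section and stores each directive as keyword -> option, accepting both %keyword=option and %keyword(option). A standalone, simplified sketch of just that parsing rule (the multi-option forms noted in the comment are not handled here either):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class Link0ParserSketch {
        static Map<String, String> parse(Iterable<String> lines) {
            Map<String, String> configs = new HashMap<>();
            for (String raw : lines) {
                String line = raw.trim();
                if (!line.startsWith("%")) {
                    continue; // only Link 0 lines; "#" route lines are ignored here
                }
                int eq = line.indexOf('=');
                int open = line.indexOf('(');
                if (eq > 0) {
                    // %procshared=6  ->  key "procshared", value "6"
                    configs.put(line.substring(1, eq).trim(), line.substring(eq + 1).trim());
                } else if (open > 0 && line.endsWith(")")) {
                    // %mem(8GB)  ->  key "mem", value "8GB"
                    configs.put(line.substring(1, open).trim(),
                                line.substring(open + 1, line.length() - 1));
                }
            }
            return configs;
        }

        public static void main(String[] args) {
            System.out.println(parse(Arrays.asList("%procshared=6", "%mem(8GB)", "# b3lyp opt")));
        }
    }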

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFac.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFac.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFac.java
index ca3f5b2..4050ef0 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFac.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFac.java
@@ -21,8 +21,8 @@
 package org.apache.airavata.gfac.core;
 
 import org.apache.airavata.common.utils.LocalEventPublisher;
+import org.apache.airavata.gfac.core.context.ProcessContext;
 import org.apache.airavata.registry.cpi.AppCatalog;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.registry.cpi.ExperimentCatalog;
 import org.apache.curator.framework.CuratorFramework;
 
@@ -54,17 +54,17 @@ public interface GFac {
 
     /**
 * This method can be used in a handler to invoke out-flow handlers asynchronously
-     * @param jobExecutionContext
+     * @param processContext
      * @throws GFacException
      */
-    public void invokeOutFlowHandlers(JobExecutionContext jobExecutionContext) throws GFacException;
+    public void invokeOutFlowHandlers(ProcessContext processContext) throws GFacException;
 
     /**
 * This method can be used to handle the re-run case asynchronously
-     * @param jobExecutionContext
+     * @param processContext
      * @throws GFacException
      */
-    public void reInvokeOutFlowHandlers(JobExecutionContext jobExecutionContext) throws GFacException;
+    public void reInvokeOutFlowHandlers(ProcessContext processContext) throws GFacException;
 
     /**
      * This operation can be used to cancel an already running experiment

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java
index c7b2093..6a9f63a 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java
@@ -26,7 +26,6 @@ import org.apache.airavata.common.utils.DBUtil;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.credential.store.store.CredentialReader;
 import org.apache.airavata.credential.store.store.impl.CredentialReaderImpl;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.ProcessContext;
 import org.apache.airavata.gfac.core.context.TaskContext;
 import org.apache.airavata.gfac.core.watcher.CancelRequestWatcher;
@@ -609,18 +608,18 @@ public class GFacUtils {
 //		}
 //	}
 
-    public static String getHandlerData(JobExecutionContext jobExecutionContext, String className) throws Exception {
-        CuratorFramework curatorClient = jobExecutionContext.getCuratorClient();
-        if (curatorClient != null) {
-            String expZnodeHandlerPath = AiravataZKUtils
-                    .getExpZnodeHandlerPath(
-                            jobExecutionContext.getExperimentID(),
-                            className);
-            Stat exists = curatorClient.checkExists().forPath(expZnodeHandlerPath);
-            return new String(jobExecutionContext.getCuratorClient().getData().storingStatIn(exists).forPath(expZnodeHandlerPath));
-        }
-        return null;
-    }
+//    public static String getHandlerData(ProcessContext processContext, String className) throws Exception {
+//        CuratorFramework curatorClient = processContext.getCuratorClient();
+//        if (curatorClient != null) {
+//            String expZnodeHandlerPath = AiravataZKUtils
+//                    .getExpZnodeHandlerPath(
+//                            processContext.getExperimentID(),
+//                            className);
+//            Stat exists = curatorClient.checkExists().forPath(expZnodeHandlerPath);
+//            return new String(processContext.getCuratorClient().getData().storingStatIn(exists).forPath(expZnodeHandlerPath));
+//        }
+//        return null;
+//    }
 
     public static CredentialReader getCredentialReader()
             throws ApplicationSettingsException, IllegalAccessException,
@@ -730,13 +729,13 @@ public class GFacUtils {
         return details.getExperimentStatus().getState();
     }
 
-    public static boolean isFailedJob(JobExecutionContext jec) {
-//        JobStatus jobStatus = jec.getJobDetails().getJobStatus();
-//        if (jobStatus.getJobState() == JobState.FAILED) {
-//            return true;
-//        }
-        return false;
-    }
+//    public static boolean isFailedJob(JobExecutionContext jec) {
+////        JobStatus jobStatus = jec.getJobDetails().getJobStatus();
+////        if (jobStatus.getJobState() == JobState.FAILED) {
+////            return true;
+////        }
+//        return false;
+//    }
 
     public static boolean ackCancelRequest(String experimentId, CuratorFramework curatorClient) throws Exception {
         String experimentEntry = GFacUtils.findExperimentEntry(experimentId, curatorClient);
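
The getHandlerData method commented out above read per-handler state from a ZooKeeper znode through Curator. For context, a minimal sketch of that read path; the connection string and znode path below are placeholders, not values from the Airavata configuration:

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.ExponentialBackoffRetry;
    import org.apache.zookeeper.data.Stat;

    public class ZnodeReadSketch {
        public static void main(String[] args) throws Exception {
            CuratorFramework client = CuratorFrameworkFactory.newClient(
                    "localhost:2181", new ExponentialBackoffRetry(1000, 3));
            client.start();
            String path = "/experiments/exp-123/handler-data"; // hypothetical znode path
            Stat stat = client.checkExists().forPath(path);    // null when the znode is absent
            if (stat != null) {
                byte[] data = client.getData().storingStatIn(stat).forPath(path);
                System.out.println(new String(data));
            }
            client.close();
        }
    }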

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyAuthentication.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyAuthentication.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyAuthentication.java
index 8824f5b..a579445 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyAuthentication.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyAuthentication.java
@@ -33,7 +33,7 @@ package org.apache.airavata.gfac.core.authentication;
  * The public key and private key are returned as byte arrays. Useful when we store private key/public key
  * in a secure storage such as credential store. API user should implement this.
  */
-public interface SSHPublicKeyAuthentication extends SSHKeyAuthentication {
+public interface SSHPublicKeyAuthentication extends AuthenticationInfo {
 
     /**
      * Gets the public key as byte array.

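With SSHPublicKeyAuthentication now extending AuthenticationInfo directly, an API user still supplies both keys as raw bytes. A minimal in-memory implementation sketch; the two accessor signatures are inferred from the javadoc above and should be treated as assumptions:

    import org.apache.airavata.gfac.core.authentication.SSHPublicKeyAuthentication;

    // Sketch: keys held in memory, e.g. fetched once from a credential store.
    public class InMemoryKeyAuthentication implements SSHPublicKeyAuthentication {
        private final byte[] publicKey;
        private final byte[] privateKey;

        public InMemoryKeyAuthentication(byte[] publicKey, byte[] privateKey) {
            this.publicKey = publicKey;
            this.privateKey = privateKey;
        }

        public byte[] getPublicKey(String userName, String hostName) {  // assumed signature
            return publicKey;
        }

        public byte[] getPrivateKey(String userName, String hostName) { // assumed signature
            return privateKey;
        }
    }
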
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyFileAuthentication.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyFileAuthentication.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyFileAuthentication.java
index f98e945..25595fd 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyFileAuthentication.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/authentication/SSHPublicKeyFileAuthentication.java
@@ -30,7 +30,7 @@ package org.apache.airavata.gfac.core.authentication;/*
  * Public key authentication for vanilla SSH.
  * The public key and private key stored files are returned. API user should implement this.
  */
-public interface SSHPublicKeyFileAuthentication extends SSHKeyAuthentication {
+public interface SSHPublicKeyFileAuthentication extends AuthenticationInfo {
 
     /**
      * The file which contains the public key.

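The file-based variant changes in the same way. For completeness, a matching sketch that hands back key file locations (again, the accessor signatures are assumptions based on the javadoc, and the paths are purely illustrative):

    import org.apache.airavata.gfac.core.authentication.SSHPublicKeyFileAuthentication;

    public class LocalKeyFileAuthentication implements SSHPublicKeyFileAuthentication {
        public String getPublicKeyFile(String userName, String hostName) {  // assumed signature
            return "/home/" + userName + "/.ssh/id_rsa.pub"; // illustrative path
        }

        public String getPrivateKeyFile(String userName, String hostName) { // assumed signature
            return "/home/" + userName + "/.ssh/id_rsa";     // illustrative path
        }
    }
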
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index 6c993e2..e3fa5ba 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -1,494 +1,494 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.core.context;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.airavata.common.utils.LocalEventPublisher;
-import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
-import org.apache.airavata.registry.cpi.AppCatalog;
-import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.SecurityContext;
-import org.apache.airavata.gfac.core.GFac;
-import org.apache.airavata.gfac.core.provider.GFacProvider;
-import org.apache.airavata.model.appcatalog.computeresource.*;
-import org.apache.airavata.model.experiment.JobDetails;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.apache.airavata.model.experiment.WorkflowNodeDetails;
-import org.apache.airavata.registry.cpi.ExperimentCatalog;
-import org.apache.curator.framework.CuratorFramework;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class JobExecutionContext extends AbstractContext implements Serializable{
-
-    private static final Logger log = LoggerFactory.getLogger(JobExecutionContext.class);
-    private GFacConfiguration gfacConfiguration;
-    private ApplicationContext applicationContext;
-    private MessageContext inMessageContext;
-    private MessageContext outMessageContext;
-    //FIXME : not needed for gfac
-    private Experiment experiment;
-    private TaskDetails taskData;
-    private JobDetails jobDetails;
-    // FIXME : not needed for gfac
-    private WorkflowNodeDetails workflowNodeDetails;
-    private GFac gfac;
-    private CuratorFramework curatorClient;
-    private String credentialStoreToken;
-    /**
-     * User defined scratch/temp directory
-     */
-    private String scratchLocation;
-    private String loginUserName;
-    /**
-     * User defined working directory.
-     */
-    private String workingDir;
-    /**
-     * Input data directory
-     */
-    private String inputDir;
-    /**
-     * Output data directory
-     */
-    private String outputDir;
-    /**
-     * standard output file path
-     */
-    private String standardOutput;
-    /**
-     * standard error file path
-     */
-    private String standardError;
-    /**
-     * User preferred job submission protocol.
-     */
-    private JobSubmissionProtocol preferredJobSubmissionProtocol;
-    /**
-     * User preferred data movement protocol.
-     */
-    private DataMovementProtocol preferredDataMovementProtocol;
-    /**
-     * List of job submission protocols sorted by priority order.
-     */
-    private List<JobSubmissionInterface> hostPrioritizedJobSubmissionInterfaces;
-    /**
-     * use preferred job submission protocol.
-     */
-    private JobSubmissionInterface preferredJobSubmissionInterface;
-
-    private ResourceJobManager resourceJobManager;
-    /**
-     * List of job submission protocols sorted by priority order.
-     */
-    private List<DataMovementInterface> hostPrioritizedDataMovementInterfaces;
-    /**
-     * use preferred job submission protocol.
-     */
-    private DataMovementInterface preferredDataMovementInterface;
-
-//    private ContextHeaderDocument.ContextHeader contextHeader;
-
-    // Keep track of the current path of the message. Before hitting provider its in-path.
-    // After provider its out-path.
-    private boolean inPath = true;
-
-    // Keep list of full qualified class names of GFac handlers which should invoked before
-    // the provider. This is specific to current job being executed.
-    private List<String> inHandlers = new ArrayList<String>();
-
-    // Keep list of full qualified class names of GFac handlers which should invoked after
-    // the provider. This is specific to current job being executed.
-    private List<String> outHandlers = new ArrayList<String>();
-
-    // During the execution of in-flow one of the handlers(Scheduling handler) will
-    // set this and GFac API will get it from the JobExecutionContext and execute the provider.
-    private GFacProvider provider;
-
-    // Service description is used by GFac to mainly specify input/output parameters for a job
-    // and to expose a job as a service to the outside world. This service concept abstract out
-    // a scientific application(or algorithm) as a service. Service name is there to identify to
-    // which service description we should refer during the execution of the current job represented
-    // by this context instance.
-    private String applicationName;
-    private String experimentID;
-    private AppCatalog appCatalog;
-    private String gatewayID;
-    private String status;
-    private List<String> outputFileList;
-    private ExperimentCatalog experimentCatalog;
-    private LocalEventPublisher localEventPublisher;
-
-    public String getGatewayID() {
-        return gatewayID;
-    }
-
-    public void setGatewayID(String gatewayID) {
-        this.gatewayID = gatewayID;
-    }
-
-
-    /**
-     *  Security context is used to handle authentication for input handlers and providers.
-     *  There can be multiple security requirement for a single job so this allows you to add multiple security types
-     *
-     */
-    private Map<String, SecurityContext> securityContext = new HashMap<String, SecurityContext>();
-
-    public JobExecutionContext(GFacConfiguration gFacConfiguration,String applicationName){
-        this.gfacConfiguration = gFacConfiguration;
-        setApplicationName(applicationName);
-        outputFileList = new ArrayList<String>();
-    }
-
-    public AppCatalog getAppCatalog() {
-        return appCatalog;
-    }
-
-    public void setAppCatalog(AppCatalog appCatalog) {
-        if (appCatalog == null) {
-            try {
-                this.appCatalog = RegistryFactory.getAppCatalog();
-            } catch (AppCatalogException e) {
-                log.error("Unable to create app catalog instance", e);
-            }
-        } else {
-            this.appCatalog = appCatalog;
-        }
-    }
-
-    public String getExperimentID() {
-        return experimentID;
-    }
-
-    public void setExperimentID(String experimentID) {
-        this.experimentID = experimentID;
-    }
-
-    public ApplicationContext getApplicationContext() {
-        return applicationContext;
-    }
-
-    public void setApplicationContext(ApplicationContext applicationContext) {
-        this.applicationContext = applicationContext;
-    }
-
-    public MessageContext getInMessageContext() {
-        return inMessageContext;
-    }
-
-    public void setInMessageContext(MessageContext inMessageContext) {
-        this.inMessageContext = inMessageContext;
-    }
-
-    public MessageContext getOutMessageContext() {
-        return outMessageContext;
-    }
-
-    public void setOutMessageContext(MessageContext outMessageContext) {
-        this.outMessageContext = outMessageContext;
-    }
-
-    public GFacConfiguration getGFacConfiguration() {
-        return gfacConfiguration;
-    }
-
-    public GFacProvider getProvider() {
-        return provider;
-    }
-
-    public void setProvider(GFacProvider provider) {
-        this.provider = provider;
-    }
-
-    public List<String> getInHandlers() {
-        return inHandlers;
-    }
-
-    public void setInHandlers(List<String> inHandlers) {
-        this.inHandlers = inHandlers;
-    }
-
-    public List<String> getOutHandlers() {
-        return outHandlers;
-    }
-
-    public void setOutHandlers(List<String> outHandlers) {
-        this.outHandlers = outHandlers;
-    }
-
-    public String getApplicationName() {
-        return applicationName;
-    }
-
-    public void setApplicationName(String applicationName) {
-        this.applicationName = applicationName;
-    }
-
-    public boolean isInPath() {
-        return inPath;
-    }
-
-    public TaskDetails getTaskData() {
-		return taskData;
-	}
-
-	public void setTaskData(TaskDetails taskData) {
-		this.taskData = taskData;
-	}
-
-	public boolean isOutPath(){
-        return !inPath;
-    }
-
-    public void setInPath() {
-        this.inPath = true;
-    }
-
-    public void setOutPath(){
-        this.inPath = false;
-    }
-
-    public ResourceJobManager getResourceJobManager() {
-        return resourceJobManager;
-    }
-
-    public void setResourceJobManager(ResourceJobManager resourceJobManager) {
-        this.resourceJobManager = resourceJobManager;
-    }
-
-    public SecurityContext getSecurityContext(String name) throws GFacException{
-		SecurityContext secContext = securityContext.get(name);
-		return secContext;
-	}
-
-	public void addSecurityContext(String name, SecurityContext value){
-		securityContext.put(name, value);
-    }
-
-	public String getStatus() {
-		return status;
-	}
-
-	public void setStatus(String status) {
-		this.status = status;
-	}
-
-    public JobDetails getJobDetails() {
-        return jobDetails;
-    }
-
-    public void setJobDetails(JobDetails jobDetails) {
-        this.jobDetails = jobDetails;
-    }
-
-    public void addOutputFile(String file) {
-        outputFileList.add(file);
-    }
-    public List<String> getOutputFiles(){
-        return outputFileList;
-    }
-
-    public ExperimentCatalog getExperimentCatalog() {
-        return experimentCatalog;
-    }
-
-    public Map<String, SecurityContext>  getAllSecurityContexts(){
-        return securityContext;
-    }
-
-    public void setExperimentCatalog(ExperimentCatalog experimentCatalog) {
-        this.experimentCatalog = experimentCatalog;
-    }
-
-    public Experiment getExperiment() {
-        return experiment;
-    }
-
-    public void setExperiment(Experiment experiment) {
-        this.experiment = experiment;
-    }
-
-    public WorkflowNodeDetails getWorkflowNodeDetails() {
-        return workflowNodeDetails;
-    }
-
-    public void setWorkflowNodeDetails(WorkflowNodeDetails workflowNodeDetails) {
-        this.workflowNodeDetails = workflowNodeDetails;
-    }
-
-    public GFac getGfac() {
-        return gfac;
-    }
-
-    public void setGfac(GFac gfac) {
-        this.gfac = gfac;
-    }
-
-    public String getCredentialStoreToken() {
-        return credentialStoreToken;
-    }
-
-    public void setCredentialStoreToken(String credentialStoreToken) {
-        this.credentialStoreToken = credentialStoreToken;
-    }
-
-    public String getScratchLocation() {
-        return scratchLocation;
-    }
-
-    public void setScratchLocation(String scratchLocation) {
-        this.scratchLocation = scratchLocation;
-    }
-
-    public String getWorkingDir() {
-        return workingDir;
-    }
-
-    public void setWorkingDir(String workingDir) {
-        this.workingDir = workingDir;
-    }
-
-    public String getInputDir() {
-        return inputDir;
-    }
-
-    public void setInputDir(String inputDir) {
-        this.inputDir = inputDir;
-    }
-
-    public String getOutputDir() {
-        return outputDir;
-    }
-
-    public void setOutputDir(String outputDir) {
-        this.outputDir = outputDir;
-    }
-
-    public String getStandardOutput() {
-        return standardOutput;
-    }
-
-    public void setStandardOutput(String standardOutput) {
-        this.standardOutput = standardOutput;
-    }
-
-    public String getStandardError() {
-        return standardError;
-    }
-
-    public void setStandardError(String standardError) {
-        this.standardError = standardError;
-    }
-
-    public JobSubmissionProtocol getPreferredJobSubmissionProtocol() {
-        return preferredJobSubmissionProtocol;
-    }
-
-    public void setPreferredJobSubmissionProtocol(JobSubmissionProtocol preferredJobSubmissionProtocol) {
-        this.preferredJobSubmissionProtocol = preferredJobSubmissionProtocol;
-    }
-
-    public DataMovementProtocol getPreferredDataMovementProtocol() {
-        return preferredDataMovementProtocol;
-    }
-
-    public void setPreferredDataMovementProtocol(DataMovementProtocol preferredDataMovementProtocol) {
-        this.preferredDataMovementProtocol = preferredDataMovementProtocol;
-    }
-
-    public List<JobSubmissionInterface> getHostPrioritizedJobSubmissionInterfaces() {
-        return hostPrioritizedJobSubmissionInterfaces;
-    }
-
-    public void setHostPrioritizedJobSubmissionInterfaces(List<JobSubmissionInterface> hostPrioritizedJobSubmissionInterfaces) {
-        this.hostPrioritizedJobSubmissionInterfaces = hostPrioritizedJobSubmissionInterfaces;
-    }
-
-    public JobSubmissionInterface getPreferredJobSubmissionInterface() {
-        return preferredJobSubmissionInterface;
-    }
-
-    public void setPreferredJobSubmissionInterface(JobSubmissionInterface preferredJobSubmissionInterface) {
-        this.preferredJobSubmissionInterface = preferredJobSubmissionInterface;
-    }
-
-    public String getHostName() {
-        return applicationContext.getComputeResourceDescription().getHostName();
-    }
-
-    public List<DataMovementInterface> getHostPrioritizedDataMovementInterfaces() {
-        return hostPrioritizedDataMovementInterfaces;
-    }
-
-    public void setHostPrioritizedDataMovementInterfaces(List<DataMovementInterface> hostPrioritizedDataMovementInterfaces) {
-        this.hostPrioritizedDataMovementInterfaces = hostPrioritizedDataMovementInterfaces;
-    }
-
-    public DataMovementInterface getPreferredDataMovementInterface() {
-        return preferredDataMovementInterface;
-    }
-
-    public void setPreferredDataMovementInterface(DataMovementInterface preferredDataMovementInterface) {
-        this.preferredDataMovementInterface = preferredDataMovementInterface;
-    }
-
-    public CuratorFramework getCuratorClient() {
-        return curatorClient;
-    }
-
-    public void setCuratorClient(CuratorFramework curatorClient) {
-        this.curatorClient = curatorClient;
-    }
-
-    public String getExecutablePath() {
-        if (applicationContext == null || applicationContext.getApplicationDeploymentDescription() == null) {
-            return null;
-        } else {
-            return applicationContext.getApplicationDeploymentDescription().getExecutablePath();
-        }
-    }
-
-
-
-    public String getLoginUserName() {
-        return loginUserName;
-    }
-
-    public void setLoginUserName(String loginUserName) {
-        this.loginUserName = loginUserName;
-    }
-
-    public LocalEventPublisher getLocalEventPublisher() {
-        return localEventPublisher;
-    }
-
-    public void setLocalEventPublisher(LocalEventPublisher localEventPublisher) {
-        this.localEventPublisher = localEventPublisher;
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//
+//package org.apache.airavata.gfac.core.context;
+//
+//import java.io.Serializable;
+//import java.util.ArrayList;
+//import java.util.HashMap;
+//import java.util.List;
+//import java.util.Map;
+//
+//import org.apache.airavata.common.utils.LocalEventPublisher;
+//import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
+//import org.apache.airavata.registry.cpi.AppCatalog;
+//import org.apache.airavata.registry.cpi.AppCatalogException;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.SecurityContext;
+//import org.apache.airavata.gfac.core.GFac;
+//import org.apache.airavata.gfac.core.provider.GFacProvider;
+//import org.apache.airavata.model.appcatalog.computeresource.*;
+//import org.apache.airavata.model.experiment.JobDetails;
+//import org.apache.airavata.model.experiment.TaskDetails;
+//import org.apache.airavata.model.experiment.WorkflowNodeDetails;
+//import org.apache.airavata.registry.cpi.ExperimentCatalog;
+//import org.apache.curator.framework.CuratorFramework;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//public class JobExecutionContext extends AbstractContext implements Serializable{
+//
+//    private static final Logger log = LoggerFactory.getLogger(JobExecutionContext.class);
+//    private GFacConfiguration gfacConfiguration;
+//    private ApplicationContext applicationContext;
+//    private MessageContext inMessageContext;
+//    private MessageContext outMessageContext;
+//    //FIXME : not needed for gfac
+//    private Experiment experiment;
+//    private TaskDetails taskData;
+//    private JobDetails jobDetails;
+//    // FIXME : not needed for gfac
+//    private WorkflowNodeDetails workflowNodeDetails;
+//    private GFac gfac;
+//    private CuratorFramework curatorClient;
+//    private String credentialStoreToken;
+//    /**
+//     * User defined scratch/temp directory
+//     */
+//    private String scratchLocation;
+//    private String loginUserName;
+//    /**
+//     * User defined working directory.
+//     */
+//    private String workingDir;
+//    /**
+//     * Input data directory
+//     */
+//    private String inputDir;
+//    /**
+//     * Output data directory
+//     */
+//    private String outputDir;
+//    /**
+//     * standard output file path
+//     */
+//    private String standardOutput;
+//    /**
+//     * standard error file path
+//     */
+//    private String standardError;
+//    /**
+//     * User preferred job submission protocol.
+//     */
+//    private JobSubmissionProtocol preferredJobSubmissionProtocol;
+//    /**
+//     * User preferred data movement protocol.
+//     */
+//    private DataMovementProtocol preferredDataMovementProtocol;
+//    /**
+//     * List of job submission protocols sorted by priority order.
+//     */
+//    private List<JobSubmissionInterface> hostPrioritizedJobSubmissionInterfaces;
+//    /**
+//     * use preferred job submission protocol.
+//     */
+//    private JobSubmissionInterface preferredJobSubmissionInterface;
+//
+//    private ResourceJobManager resourceJobManager;
+//    /**
+//     * List of job submission protocols sorted by priority order.
+//     */
+//    private List<DataMovementInterface> hostPrioritizedDataMovementInterfaces;
+//    /**
+//     * use preferred job submission protocol.
+//     */
+//    private DataMovementInterface preferredDataMovementInterface;
+//
+////    private ContextHeaderDocument.ContextHeader contextHeader;
+//
+//    // Keep track of the current path of the message. Before hitting provider its in-path.
+//    // After provider its out-path.
+//    private boolean inPath = true;
+//
+//    // Keep list of full qualified class names of GFac handlers which should invoked before
+//    // the provider. This is specific to current job being executed.
+//    private List<String> inHandlers = new ArrayList<String>();
+//
+//    // Keep list of full qualified class names of GFac handlers which should invoked after
+//    // the provider. This is specific to current job being executed.
+//    private List<String> outHandlers = new ArrayList<String>();
+//
+//    // During the execution of in-flow one of the handlers(Scheduling handler) will
+//    // set this and GFac API will get it from the JobExecutionContext and execute the provider.
+//    private GFacProvider provider;
+//
+//    // Service description is used by GFac to mainly specify input/output parameters for a job
+//    // and to expose a job as a service to the outside world. This service concept abstract out
+//    // a scientific application(or algorithm) as a service. Service name is there to identify to
+//    // which service description we should refer during the execution of the current job represented
+//    // by this context instance.
+//    private String applicationName;
+//    private String experimentID;
+//    private AppCatalog appCatalog;
+//    private String gatewayID;
+//    private String status;
+//    private List<String> outputFileList;
+//    private ExperimentCatalog experimentCatalog;
+//    private LocalEventPublisher localEventPublisher;
+//
+//    public String getGatewayID() {
+//        return gatewayID;
+//    }
+//
+//    public void setGatewayID(String gatewayID) {
+//        this.gatewayID = gatewayID;
+//    }
+//
+//
+//    /**
+//     *  Security context is used to handle authentication for input handlers and providers.
+//     *  There can be multiple security requirement for a single job so this allows you to add multiple security types
+//     *
+//     */
+//    private Map<String, SecurityContext> securityContext = new HashMap<String, SecurityContext>();
+//
+//    public JobExecutionContext(GFacConfiguration gFacConfiguration,String applicationName){
+//        this.gfacConfiguration = gFacConfiguration;
+//        setApplicationName(applicationName);
+//        outputFileList = new ArrayList<String>();
+//    }
+//
+//    public AppCatalog getAppCatalog() {
+//        return appCatalog;
+//    }
+//
+//    public void setAppCatalog(AppCatalog appCatalog) {
+//        if (appCatalog == null) {
+//            try {
+//                this.appCatalog = RegistryFactory.getAppCatalog();
+//            } catch (AppCatalogException e) {
+//                log.error("Unable to create app catalog instance", e);
+//            }
+//        } else {
+//            this.appCatalog = appCatalog;
+//        }
+//    }
+//
+//    public String getExperimentID() {
+//        return experimentID;
+//    }
+//
+//    public void setExperimentID(String experimentID) {
+//        this.experimentID = experimentID;
+//    }
+//
+//    public ApplicationContext getApplicationContext() {
+//        return applicationContext;
+//    }
+//
+//    public void setApplicationContext(ApplicationContext applicationContext) {
+//        this.applicationContext = applicationContext;
+//    }
+//
+//    public MessageContext getInMessageContext() {
+//        return inMessageContext;
+//    }
+//
+//    public void setInMessageContext(MessageContext inMessageContext) {
+//        this.inMessageContext = inMessageContext;
+//    }
+//
+//    public MessageContext getOutMessageContext() {
+//        return outMessageContext;
+//    }
+//
+//    public void setOutMessageContext(MessageContext outMessageContext) {
+//        this.outMessageContext = outMessageContext;
+//    }
+//
+//    public GFacConfiguration getGFacConfiguration() {
+//        return gfacConfiguration;
+//    }
+//
+//    public GFacProvider getProvider() {
+//        return provider;
+//    }
+//
+//    public void setProvider(GFacProvider provider) {
+//        this.provider = provider;
+//    }
+//
+//    public List<String> getInHandlers() {
+//        return inHandlers;
+//    }
+//
+//    public void setInHandlers(List<String> inHandlers) {
+//        this.inHandlers = inHandlers;
+//    }
+//
+//    public List<String> getOutHandlers() {
+//        return outHandlers;
+//    }
+//
+//    public void setOutHandlers(List<String> outHandlers) {
+//        this.outHandlers = outHandlers;
+//    }
+//
+//    public String getApplicationName() {
+//        return applicationName;
+//    }
+//
+//    public void setApplicationName(String applicationName) {
+//        this.applicationName = applicationName;
+//    }
+//
+//    public boolean isInPath() {
+//        return inPath;
+//    }
+//
+//    public TaskDetails getTaskData() {
+//		return taskData;
+//	}
+//
+//	public void setTaskData(TaskDetails taskData) {
+//		this.taskData = taskData;
+//	}
+//
+//	public boolean isOutPath(){
+//        return !inPath;
+//    }
+//
+//    public void setInPath() {
+//        this.inPath = true;
+//    }
+//
+//    public void setOutPath(){
+//        this.inPath = false;
+//    }
+//
+//    public ResourceJobManager getResourceJobManager() {
+//        return resourceJobManager;
+//    }
+//
+//    public void setResourceJobManager(ResourceJobManager resourceJobManager) {
+//        this.resourceJobManager = resourceJobManager;
+//    }
+//
+//    public SecurityContext getSecurityContext(String name) throws GFacException{
+//		SecurityContext secContext = securityContext.get(name);
+//		return secContext;
+//	}
+//
+//	public void addSecurityContext(String name, SecurityContext value){
+//		securityContext.put(name, value);
+//    }
+//
+//	public String getStatus() {
+//		return status;
+//	}
+//
+//	public void setStatus(String status) {
+//		this.status = status;
+//	}
+//
+//    public JobDetails getJobDetails() {
+//        return jobDetails;
+//    }
+//
+//    public void setJobDetails(JobDetails jobDetails) {
+//        this.jobDetails = jobDetails;
+//    }
+//
+//    public void addOutputFile(String file) {
+//        outputFileList.add(file);
+//    }
+//    public List<String> getOutputFiles(){
+//        return outputFileList;
+//    }
+//
+//    public ExperimentCatalog getExperimentCatalog() {
+//        return experimentCatalog;
+//    }
+//
+//    public Map<String, SecurityContext>  getAllSecurityContexts(){
+//        return securityContext;
+//    }
+//
+//    public void setExperimentCatalog(ExperimentCatalog experimentCatalog) {
+//        this.experimentCatalog = experimentCatalog;
+//    }
+//
+//    public Experiment getExperiment() {
+//        return experiment;
+//    }
+//
+//    public void setExperiment(Experiment experiment) {
+//        this.experiment = experiment;
+//    }
+//
+//    public WorkflowNodeDetails getWorkflowNodeDetails() {
+//        return workflowNodeDetails;
+//    }
+//
+//    public void setWorkflowNodeDetails(WorkflowNodeDetails workflowNodeDetails) {
+//        this.workflowNodeDetails = workflowNodeDetails;
+//    }
+//
+//    public GFac getGfac() {
+//        return gfac;
+//    }
+//
+//    public void setGfac(GFac gfac) {
+//        this.gfac = gfac;
+//    }
+//
+//    public String getCredentialStoreToken() {
+//        return credentialStoreToken;
+//    }
+//
+//    public void setCredentialStoreToken(String credentialStoreToken) {
+//        this.credentialStoreToken = credentialStoreToken;
+//    }
+//
+//    public String getScratchLocation() {
+//        return scratchLocation;
+//    }
+//
+//    public void setScratchLocation(String scratchLocation) {
+//        this.scratchLocation = scratchLocation;
+//    }
+//
+//    public String getWorkingDir() {
+//        return workingDir;
+//    }
+//
+//    public void setWorkingDir(String workingDir) {
+//        this.workingDir = workingDir;
+//    }
+//
+//    public String getInputDir() {
+//        return inputDir;
+//    }
+//
+//    public void setInputDir(String inputDir) {
+//        this.inputDir = inputDir;
+//    }
+//
+//    public String getOutputDir() {
+//        return outputDir;
+//    }
+//
+//    public void setOutputDir(String outputDir) {
+//        this.outputDir = outputDir;
+//    }
+//
+//    public String getStandardOutput() {
+//        return standardOutput;
+//    }
+//
+//    public void setStandardOutput(String standardOutput) {
+//        this.standardOutput = standardOutput;
+//    }
+//
+//    public String getStandardError() {
+//        return standardError;
+//    }
+//
+//    public void setStandardError(String standardError) {
+//        this.standardError = standardError;
+//    }
+//
+//    public JobSubmissionProtocol getPreferredJobSubmissionProtocol() {
+//        return preferredJobSubmissionProtocol;
+//    }
+//
+//    public void setPreferredJobSubmissionProtocol(JobSubmissionProtocol preferredJobSubmissionProtocol) {
+//        this.preferredJobSubmissionProtocol = preferredJobSubmissionProtocol;
+//    }
+//
+//    public DataMovementProtocol getPreferredDataMovementProtocol() {
+//        return preferredDataMovementProtocol;
+//    }
+//
+//    public void setPreferredDataMovementProtocol(DataMovementProtocol preferredDataMovementProtocol) {
+//        this.preferredDataMovementProtocol = preferredDataMovementProtocol;
+//    }
+//
+//    public List<JobSubmissionInterface> getHostPrioritizedJobSubmissionInterfaces() {
+//        return hostPrioritizedJobSubmissionInterfaces;
+//    }
+//
+//    public void setHostPrioritizedJobSubmissionInterfaces(List<JobSubmissionInterface> hostPrioritizedJobSubmissionInterfaces) {
+//        this.hostPrioritizedJobSubmissionInterfaces = hostPrioritizedJobSubmissionInterfaces;
+//    }
+//
+//    public JobSubmissionInterface getPreferredJobSubmissionInterface() {
+//        return preferredJobSubmissionInterface;
+//    }
+//
+//    public void setPreferredJobSubmissionInterface(JobSubmissionInterface preferredJobSubmissionInterface) {
+//        this.preferredJobSubmissionInterface = preferredJobSubmissionInterface;
+//    }
+//
+//    public String getHostName() {
+//        return applicationContext.getComputeResourceDescription().getHostName();
+//    }
+//
+//    public List<DataMovementInterface> getHostPrioritizedDataMovementInterfaces() {
+//        return hostPrioritizedDataMovementInterfaces;
+//    }
+//
+//    public void setHostPrioritizedDataMovementInterfaces(List<DataMovementInterface> hostPrioritizedDataMovementInterfaces) {
+//        this.hostPrioritizedDataMovementInterfaces = hostPrioritizedDataMovementInterfaces;
+//    }
+//
+//    public DataMovementInterface getPreferredDataMovementInterface() {
+//        return preferredDataMovementInterface;
+//    }
+//
+//    public void setPreferredDataMovementInterface(DataMovementInterface preferredDataMovementInterface) {
+//        this.preferredDataMovementInterface = preferredDataMovementInterface;
+//    }
+//
+//    public CuratorFramework getCuratorClient() {
+//        return curatorClient;
+//    }
+//
+//    public void setCuratorClient(CuratorFramework curatorClient) {
+//        this.curatorClient = curatorClient;
+//    }
+//
+//    public String getExecutablePath() {
+//        if (applicationContext == null || applicationContext.getApplicationDeploymentDescription() == null) {
+//            return null;
+//        } else {
+//            return applicationContext.getApplicationDeploymentDescription().getExecutablePath();
+//        }
+//    }
+//
+//
+//
+//    public String getLoginUserName() {
+//        return loginUserName;
+//    }
+//
+//    public void setLoginUserName(String loginUserName) {
+//        this.loginUserName = loginUserName;
+//    }
+//
+//    public LocalEventPublisher getLocalEventPublisher() {
+//        return localEventPublisher;
+//    }
+//
+//    public void setLocalEventPublisher(LocalEventPublisher localEventPublisher) {
+//        this.localEventPublisher = localEventPublisher;
+//    }
+//}

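Most of the retired JobExecutionContext is plain accessors, but one pattern worth noting is the name-keyed security context map, which let a single job carry several authentication types side by side. The same idea reduced to a self-contained sketch (the generic parameter stands in for gfac's SecurityContext interface):

    import java.util.HashMap;
    import java.util.Map;

    // Minimal sketch of the keyed-security-context pattern used above.
    public class SecurityContextRegistry<C> {
        private final Map<String, C> contexts = new HashMap<>();

        public void add(String name, C context) {
            contexts.put(name, context); // e.g. one entry per host or protocol
        }

        public C get(String name) {
            return contexts.get(name);   // null when nothing was registered under name
        }
    }
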
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
index 5a0e597..5f3471c 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
@@ -20,7 +20,7 @@
 */
 package org.apache.airavata.gfac.core.monitor;
 
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
+import org.apache.airavata.gfac.core.context.ProcessContext;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.status.JobState;
 import org.slf4j.Logger;
@@ -61,7 +61,7 @@ public class MonitorID {
 
     private JobState state;
 
-    private JobExecutionContext jobExecutionContext;
+    private ProcessContext processContext;
 
     public MonitorID() {
     }
@@ -86,16 +86,16 @@ public class MonitorID {
         this.jobName = jobName;
     }
 
-    public MonitorID(JobExecutionContext jobExecutionContext) {
-/*        this.jobExecutionContext = jobExecutionContext;
-        this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
-        userName = jobExecutionContext.getExperiment().getUserName();
-        taskID = jobExecutionContext.getTaskData().getTaskID();
-        experimentID = jobExecutionContext.getExperiment().getExperimentID();
-        workflowNodeID = jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId();// at this point we only have one node todo: fix this
+    public MonitorID(ProcessContext processContext) {
+/*        this.processContext = processContext;
+        this.computeResourceDescription = processContext.getApplicationContext().getComputeResourceDescription();
+        userName = processContext.getExperiment().getUserName();
+        taskID = processContext.getTaskData().getTaskID();
+        experimentID = processContext.getExperiment().getExperimentID();
+        workflowNodeID = processContext.getWorkflowNodeDetails().getNodeInstanceId();// at this point we only have one node todo: fix this
         try {
-            jobName = jobExecutionContext.getJobDetails().getJobName();
-            jobID = jobExecutionContext.getJobDetails().getJobID();
+            jobName = processContext.getJobDetails().getJobName();
+            jobID = processContext.getJobDetails().getJobID();
         }catch(NullPointerException e){
             logger.error("There is not job created at this point");
             // this is not a big deal we create MonitorId before having a jobId or job Name
@@ -210,12 +210,12 @@ public class MonitorID {
         this.workflowNodeID = workflowNodeID;
     }
 
-    public JobExecutionContext getJobExecutionContext() {
-        return jobExecutionContext;
+    public ProcessContext getProcessContext() {
+        return processContext;
     }
 
-    public void setJobExecutionContext(JobExecutionContext jobExecutionContext) {
-        this.jobExecutionContext = jobExecutionContext;
+    public void setProcessContext(ProcessContext processContext) {
+        this.processContext = processContext;
     }
 
     public String getJobName() {

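As the commented-out constructor body notes, a MonitorID is usually created before a job ID or name exists, so those fields get filled in later. A short usage sketch built from the ProcessContext constructor shown in this hunk, plus a job-name setter assumed to exist alongside the getJobName shown above:

    import org.apache.airavata.gfac.core.context.ProcessContext;
    import org.apache.airavata.gfac.core.monitor.MonitorID;

    public class MonitorIdUsage {
        public static MonitorID track(ProcessContext processContext, String jobName) {
            MonitorID monitorId = new MonitorID(processContext);
            monitorId.setJobName(jobName); // job name becomes known at submission time
            return monitorId;
        }
    }
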
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/GSSContextX509.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/GSSContextX509.java b/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/GSSContextX509.java
new file mode 100644
index 0000000..06cd356
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/GSSContextX509.java
@@ -0,0 +1,205 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package com.jcraft.jsch;
+
+
+import org.globus.common.CoGProperties;
+import org.globus.gsi.gssapi.auth.HostAuthorization;
+import org.gridforum.jgss.ExtendedGSSCredential;
+import org.gridforum.jgss.ExtendedGSSManager;
+import org.ietf.jgss.GSSContext;
+import org.ietf.jgss.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+/**
+ * This class is based on GSSContextKrb5; it substitutes the globus
+ * ExtendedGSSManager and uses the SecurityUtils method to get the credential if
+ * one is not passed in from memory.
+ *
+ */
+public class GSSContextX509 implements com.jcraft.jsch.GSSContext {
+
+    private GSSContext context = null;
+    private GSSCredential credential;
+    private static final Logger logger = LoggerFactory.getLogger(GSSContextX509.class);
+
+    public void create(String user, String host) throws JSchException {
+        try {
+//			ExtendedGSSManager manager = (ExtendedGSSManager) ExtendedGSSManager.getInstance();
+
+            if (credential == null) {
+                try {
+                    credential = getCredential();
+                } catch (SecurityException t) {
+                    System.out.printf("Could not get proxy: %s: %s\n", t.getClass().getSimpleName(), t.getMessage());
+                    throw new JSchException(t.toString());
+                }
+            }
+
+            String cname = host;
+
+            try {
+                cname = InetAddress.getByName(cname).getCanonicalHostName();
+            } catch (UnknownHostException e) {
+                // fall back to the name as given when the lookup fails
+            }
+
+            GSSName name = HostAuthorization.getInstance().getExpectedName(credential, cname);
+
+//			context = manager.createContext(name, null, credential, GSSContext.DEFAULT_LIFETIME);
+//
+//			// RFC4462 3.4. GSS-API Session
+//			//
+//			// When calling GSS_Init_sec_context(), the client MUST set
+//			// integ_req_flag to "true" to request that per-message integrity
+//			// protection be supported for this context. In addition,
+//			// deleg_req_flag MAY be set to "true" to request access delegation,
+//			// if
+//			// requested by the user.
+//			//
+//			// Since the user authentication process by its nature authenticates
+//			// only the client, the setting of mutual_req_flag is not needed for
+//			// this process. This flag SHOULD be set to "false".
+//
+//			// TODO: OpenSSH's sshd does accept 'false' for mutual_req_flag
+//			// context.requestMutualAuth(false);
+//			context.requestMutualAuth(true);
+//			context.requestConf(true);
+//			context.requestInteg(true); // for MIC
+//			context.requestCredDeleg(true);
+//			context.requestAnonymity(false);
+
+//            context = new BCGSSContextImpl(name, (GlobusGSSCredentialImpl) credential);
+//            context.requestLifetime(GSSCredential.DEFAULT_LIFETIME);
+//            context.requestCredDeleg(true);
+//            context.requestMutualAuth(true);
+//            context.requestReplayDet(true);
+//            context.requestSequenceDet(true);
+//            context.requestConf(false);
+//            context.requestInteg(true);
+//            ((ExtendedGSSContext)context).setOption(GSSConstants.DELEGATION_TYPE, GSIConstants.DELEGATION_TYPE_FULL);
+
+            return;
+        } catch (GSSException ex) {
+            throw new JSchException(ex.toString());
+        }
+    }
+
+    private static GSSCredential getProxy() {
+        return getProxy(null, GSSCredential.DEFAULT_LIFETIME);
+    }
+
+    /**
+     * @param x509_USER_PROXY
+     *            path to the proxy.
+     * @param credentialLifetime
+     *            in seconds.
+     * @return valid credential.
+     * @throws SecurityException
+     *             if proxy task throws exception (or if proxy cannot be found).
+     */
+    private static GSSCredential getProxy(String x509_USER_PROXY, int credentialLifetime) throws SecurityException {
+        if (x509_USER_PROXY == null)
+            x509_USER_PROXY = System.getProperty("x509.user.proxy");
+
+//		if (x509_USER_PROXY == null) {
+//			SystemUtils.envToProperties();
+//			x509_USER_PROXY = System.getProperty("x509.user.proxy");
+//		}
+
+        if (x509_USER_PROXY == null || "".equals(x509_USER_PROXY))
+            x509_USER_PROXY = CoGProperties.getDefault().getProxyFile();
+
+        if (x509_USER_PROXY == null)
+            throw new SecurityException("could not get credential; no location defined");
+
+        ExtendedGSSManager manager = (ExtendedGSSManager) ExtendedGSSManager.getInstance();
+
+        // file...load file into a buffer
+        try {
+            File f = new File(x509_USER_PROXY);
+            byte[] data = new byte[(int) f.length()];
+            FileInputStream in = new FileInputStream(f);
+            // read in the credential data
+            in.read(data);
+            in.close();
+            return manager.createCredential(data, ExtendedGSSCredential.IMPEXP_OPAQUE, credentialLifetime,
+                    null, // use default mechanism - GSI
+                    GSSCredential.INITIATE_AND_ACCEPT);
+        } catch (Throwable t) {
+            throw new SecurityException("could not get credential from " + x509_USER_PROXY, t);
+        }
+    }
+
+    public boolean isEstablished() {
+        // this must check to see if the call returned GSS_S_COMPLETE
+        if (context != null){
+            return context.isEstablished();
+        }
+        return false;
+    }
+
+    public byte[] init(byte[] token, int s, int l) throws JSchException {
+        try {
+            if (context != null){
+                return context.initSecContext(token, s, l);
+            } else {
+                throw new JSchException("Context is null.");
+            }
+        } catch (GSSException ex) {
+            throw new JSchException(ex.toString());
+        }
+    }
+
+    public byte[] getMIC(byte[] message, int s, int l) {
+        try {
+            MessageProp prop = new MessageProp(0, false);
+            return context.getMIC(message, s, l, prop);
+        } catch (GSSException ex) {
+            logger.error(ex.getMessage(), ex);
+            return null;
+        }
+    }
+
+    public void dispose() {
+        try {
+            context.dispose();
+        } catch (GSSException ex) {
+            // nothing actionable at disposal time; swallow
+        }
+    }
+
+    public void setCredential(GSSCredential credential) {
+        this.credential = credential;
+    }
+
+    public GSSCredential getCredential() {
+        return credential;
+    }
+}
+

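The proxy lookup in getProxy() follows a fixed resolution order: the explicit path argument, then the x509.user.proxy system property, then the CoG properties default. That order, distilled into a standalone helper for clarity (a restatement of the code above, not a new API):

    import org.globus.common.CoGProperties;

    public class ProxyLocation {
        // Mirrors the resolution order in GSSContextX509.getProxy().
        public static String resolve(String explicitPath) {
            String path = explicitPath;
            if (path == null) {
                path = System.getProperty("x509.user.proxy");
            }
            if (path == null || "".equals(path)) {
                path = CoGProperties.getDefault().getProxyFile();
            }
            if (path == null) {
                throw new SecurityException("could not get credential; no location defined");
            }
            return path;
        }
    }
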
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/UserAuthGSSAPIWithMICGSSCredentials.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/UserAuthGSSAPIWithMICGSSCredentials.java b/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/UserAuthGSSAPIWithMICGSSCredentials.java
index d0e98db..747338a 100644
--- a/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/UserAuthGSSAPIWithMICGSSCredentials.java
+++ b/modules/gfac/gfac-impl/src/main/java/com/jcraft/jsch/UserAuthGSSAPIWithMICGSSCredentials.java
@@ -22,7 +22,6 @@
 package com.jcraft.jsch;
 
 import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gfac.gsi.ssh.GSSContextX509;
 import org.globus.gsi.gssapi.GSSConstants;
 import org.ietf.jgss.GSSException;
 import org.ietf.jgss.Oid;

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/SSHJobSubmissionTask.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/SSHJobSubmissionTask.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/SSHJobSubmissionTask.java
index 7f4c13a..9873de5 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/SSHJobSubmissionTask.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/SSHJobSubmissionTask.java
@@ -22,18 +22,14 @@
 package org.apache.airavata.gfac.impl.task;
 
 import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.Constants;
-import org.apache.airavata.common.utils.LocalEventPublisher;
 import org.apache.airavata.gfac.core.*;
 import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.ProcessContext;
 import org.apache.airavata.gfac.core.context.TaskContext;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.core.task.JobSubmissionTask;
 import org.apache.airavata.gfac.core.task.TaskException;
 import org.apache.airavata.gfac.impl.Factory;
 import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
 import org.apache.airavata.model.job.JobModel;
 import org.apache.airavata.model.status.JobState;
 import org.apache.airavata.model.status.JobStatus;
@@ -67,26 +63,8 @@ public class SSHJobSubmissionTask implements JobSubmissionTask {
             jobModel.setJobName(jobDescriptor.getJobName());
             ResourceJobManager resourceJobManager = GFacUtils.getResourceJobManager(processContext);
             JobManagerConfiguration jConfig = null;
-            if (resourceJobManager != null){
-                String installedParentPath = resourceJobManager.getJobManagerBinPath();
-                if (installedParentPath == null) {
-                    installedParentPath = "/";
-                }
-                ResourceJobManagerType resourceJobManagerType = resourceJobManager.getResourceJobManagerType();
-                if (resourceJobManagerType == null) {
-                    log.error("No Job Manager is configured, so we are picking pbs as the default job manager");
-                    jConfig = Factory.getPBSJobManager(installedParentPath);
-                } else {
-                    if (ResourceJobManagerType.PBS == resourceJobManagerType) {
-                        jConfig = Factory.getPBSJobManager(installedParentPath);
-                    } else if (ResourceJobManagerType.SLURM == resourceJobManagerType) {
-                        jConfig = Factory.getSLURMJobManager(installedParentPath);
-                    } else if (ResourceJobManagerType.UGE == resourceJobManagerType) {
-                        jConfig = Factory.getUGEJobManager(installedParentPath);
-                    } else if (ResourceJobManagerType.LSF == resourceJobManagerType) {
-                        jConfig = Factory.getLSFJobManager(installedParentPath);
-                    }
-                }
+            if (resourceJobManager != null) {
+                jConfig = Factory.getJobManagerConfiguration(resourceJobManager);
             }
             File jobFile = GFacUtils.createJobFile(jobDescriptor, jConfig);
             if (jobFile != null && jobFile.exists()){

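The inlined job-manager selection removed above now sits behind Factory.getJobManagerConfiguration. That method is not part of this diff, but from the deleted branch logic it plausibly reduces to the following reconstruction (treat it as a sketch, not the actual Factory source):

    import org.apache.airavata.gfac.core.JobManagerConfiguration;
    import org.apache.airavata.gfac.impl.Factory;
    import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
    import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;

    public class JobManagerSelection {
        // Reconstruction of the retired if-else chain.
        public static JobManagerConfiguration select(ResourceJobManager rjm) {
            String binPath = rjm.getJobManagerBinPath();
            if (binPath == null) {
                binPath = "/"; // same default as the removed code
            }
            ResourceJobManagerType type = rjm.getResourceJobManagerType();
            if (type == null || type == ResourceJobManagerType.PBS) {
                return Factory.getPBSJobManager(binPath);   // PBS was the stated fallback
            } else if (type == ResourceJobManagerType.SLURM) {
                return Factory.getSLURMJobManager(binPath);
            } else if (type == ResourceJobManagerType.UGE) {
                return Factory.getUGEJobManager(binPath);
            } else if (type == ResourceJobManagerType.LSF) {
                return Factory.getLSFJobManager(binPath);
            }
            return null;
        }
    }
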
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
index 2f9e3b0..a76a866 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
@@ -1,62 +1,62 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.local.handler;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.GFacHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.util.Properties;
-
-public class LocalDirectorySetupHandler implements GFacHandler {
-    private static final Logger log = LoggerFactory.getLogger(LocalDirectorySetupHandler.class);
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        log.info("Invoking LocalDirectorySetupHandler ...");
-        log.debug("working directory = " + jobExecutionContext.getWorkingDir());
-        log.debug("temp directory = " + jobExecutionContext.getWorkingDir());
-
-        makeFileSystemDir(jobExecutionContext.getWorkingDir());
-        makeFileSystemDir(jobExecutionContext.getInputDir());
-        makeFileSystemDir(jobExecutionContext.getOutputDir());
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    private void makeFileSystemDir(String dir) throws GFacHandlerException {
-           File f = new File(dir);
-           if (f.isDirectory() && f.exists()) {
-               return;
-           } else if (!new File(dir).mkdir()) {
-               throw new GFacHandlerException("Cannot create directory " + dir);
-           }
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.local.handler;
+//
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.GFacHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.util.Properties;
+//
+//public class LocalDirectorySetupHandler implements GFacHandler {
+//    private static final Logger log = LoggerFactory.getLogger(LocalDirectorySetupHandler.class);
+//
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        log.info("Invoking LocalDirectorySetupHandler ...");
+//        log.debug("working directory = " + jobExecutionContext.getWorkingDir());
+//        log.debug("temp directory = " + jobExecutionContext.getWorkingDir());
+//
+//        makeFileSystemDir(jobExecutionContext.getWorkingDir());
+//        makeFileSystemDir(jobExecutionContext.getInputDir());
+//        makeFileSystemDir(jobExecutionContext.getOutputDir());
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//    private void makeFileSystemDir(String dir) throws GFacHandlerException {
+//           File f = new File(dir);
+//           if (f.isDirectory() && f.exists()) {
+//               return;
+//           } else if (!new File(dir).mkdir()) {
+//               throw new GFacHandlerException("Cannot create directory " + dir);
+//           }
+//    }
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//
+//    }
+//}

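Two caveats in the retired handler: the debug line labeled "temp directory" reuses getWorkingDir(), which looks like a copy-paste slip, and makeFileSystemDir calls File.mkdir(), which fails when intermediate directories are missing. A sturdier sketch using java.nio, for anyone reviving this logic:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class DirectorySetup {
        public static void makeFileSystemDir(String dir) throws IOException {
            Path path = Paths.get(dir);
            if (Files.isDirectory(path)) {
                return; // already in place
            }
            Files.createDirectories(path); // creates parents; throws if a file is in the way
        }
    }
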

[2/5] airavata git commit: fixing compilation issues

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
index 0a2c1d2..95368f6 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
@@ -1,256 +1,256 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.handler;
-
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.gfac.core.GFacConstants;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.impl.OutputUtils;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.DataTransferDetails;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.apache.airavata.model.experiment.TransferState;
-import org.apache.airavata.model.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-public class SSHOutputHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(SSHOutputHandler.class);
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        String hostAddress = jobExecutionContext.getHostName();
-        try {
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-            }
-        } catch (Exception e) {
-            log.error(e.getMessage());
-            try {
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-            } catch (GFacException e1) {
-                log.error(e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-        }
-
-        super.invoke(jobExecutionContext);
-        DataTransferDetails detail = new DataTransferDetails();
-        detail.setTransferDescription("Output data staging");
-        TransferStatus status = new TransferStatus();
-
-        RemoteCluster remoteCluster = null;
-        try {
-             remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-            if (remoteCluster == null) {
-                throw new GFacProviderException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-
-            // Fetch the stdout and stderr files
-            String timeStampedExperimentID = GFacUtils.createUniqueNameWithDate(jobExecutionContext.getExperimentID());
-
-            TaskDetails taskData = jobExecutionContext.getTaskData();
-            String outputDataDir = ServerSettings.getSetting(GFacConstants.OUTPUT_DATA_DIR, File.separator + "tmp");
-            File localStdOutFile;
-            File localStdErrFile;
-            //FIXME: AdvancedOutput is a remote location; third-party transfer must work for this to function
-//            if (taskData.getAdvancedOutputDataHandling() != null) {
-//                outputDataDir = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import org.apache.airavata.common.utils.ServerSettings;
+//import org.apache.airavata.gfac.core.GFacConstants;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.provider.GFacProviderException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.impl.OutputUtils;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+//import org.apache.airavata.model.experiment.CorrectiveAction;
+//import org.apache.airavata.model.experiment.DataTransferDetails;
+//import org.apache.airavata.model.experiment.ErrorCategory;
+//import org.apache.airavata.model.experiment.TaskDetails;
+//import org.apache.airavata.model.experiment.TransferState;
+//import org.apache.airavata.model.experiment.TransferStatus;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.util.ArrayList;
+//import java.util.List;
+//import java.util.Map;
+//import java.util.Properties;
+//import java.util.Set;
+//
+//public class SSHOutputHandler extends AbstractHandler {
+//    private static final Logger log = LoggerFactory.getLogger(SSHOutputHandler.class);
+//
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        String hostAddress = jobExecutionContext.getHostName();
+//        try {
+//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//                GFACSSHUtils.addSecurityContext(jobExecutionContext);
 //            }
-            if (outputDataDir == null) {
-                outputDataDir = File.separator + "tmp";
-            }
-            outputDataDir = outputDataDir + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID();
-            (new File(outputDataDir)).mkdirs();
-
-
-            localStdOutFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stdout");
-            localStdErrFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stderr");
-//            remoteCluster.makeDirectory(outputDataDir);
-            int i = 0;
-            String stdOutStr = "";
-            while (stdOutStr.isEmpty()) {
-                try {
-                    remoteCluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
-                    stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
-                } catch (Exception e) {
-                    log.error(e.getLocalizedMessage());
-                    Thread.sleep(2000);
-                }
-                i++;
-                if (i == 3) break;
-            }
-            Thread.sleep(1000);
-            remoteCluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
-            Thread.sleep(1000);
-
-            String stdErrStr = GFacUtils.readFileToString(localStdErrFile.getAbsolutePath());
-            status.setTransferState(TransferState.STDOUT_DOWNLOAD);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("STDOUT:" + localStdOutFile.getAbsolutePath());
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-            status.setTransferState(TransferState.STDERROR_DOWNLOAD);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("STDERR:" + localStdErrFile.getAbsolutePath());
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-
-            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-            Set<String> keys = output.keySet();
-            for (String paramName : keys) {
-                OutputDataObjectType actualParameter = (OutputDataObjectType) output.get(paramName);
-                if (DataType.URI == actualParameter.getType()) {
-                    List<String> outputList = null;
-                    int retry = 3;
-                    while (retry > 0) {
-                        outputList = remoteCluster.listDirectory(jobExecutionContext.getOutputDir());
-                        if (outputList.size() > 0) {
-                            break;
-                        }
-                        retry--;
-                        Thread.sleep(2000);
-                    }
-
-                    if (outputList.size() == 0 || outputList.get(0).isEmpty() || outputList.size() > 1) {
-                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
-                        Set<String> strings = output.keySet();
-                        outputArray.clear();
-                        for (String key : strings) {
-                            OutputDataObjectType actualParameter1 = (OutputDataObjectType) output.get(key);
-                            if (DataType.URI == actualParameter1.getType()) {
-                                String downloadFile = actualParameter1.getValue();
-                                remoteCluster.scpFrom(downloadFile, outputDataDir);
-                                String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-                                String localFile = outputDataDir + File.separator + fileName;
-                                jobExecutionContext.addOutputFile(localFile);
-                                actualParameter1.setValue(localFile);
-                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                                dataObjectType.setValue(localFile);
-                                dataObjectType.setName(key);
-                                dataObjectType.setType(DataType.URI);
-                                outputArray.add(dataObjectType);
-                            } else if (DataType.STDOUT == actualParameter1.getType()) {
-                                String fileName = localStdOutFile.getName();
-                                String localFile = outputDataDir + File.separator + fileName;
-                                jobExecutionContext.addOutputFile(localFile);
-                                actualParameter1.setValue(localFile);
-                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                                dataObjectType.setValue(localFile);
-                                dataObjectType.setName(key);
-                                dataObjectType.setType(DataType.STDOUT);
-                                outputArray.add(dataObjectType);
-                            } else if (DataType.STDERR == actualParameter1.getType()) {
-                                String fileName = localStdErrFile.getName();
-                                String localFile = outputDataDir + File.separator + fileName;
-                                jobExecutionContext.addOutputFile(localFile);
-                                actualParameter1.setValue(localFile);
-                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                                dataObjectType.setValue(localFile);
-                                dataObjectType.setName(key);
-                                dataObjectType.setType(DataType.STDERR);
-                                outputArray.add(dataObjectType);
-                            }
-                        }
-                        break;
-                    } else if (outputList.size() == 1) { //FIXME: Ultrascan case
-                        String valueList = outputList.get(0);
-                        remoteCluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
-                        String outputPath = outputDataDir + File.separator + valueList;
-                        jobExecutionContext.addOutputFile(outputPath);
-                        actualParameter.setValue(outputPath);
-                        OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                        dataObjectType.setValue(outputPath);
-                        dataObjectType.setName(paramName);
-                        dataObjectType.setType(DataType.URI);
-                        outputArray.add(dataObjectType);
-                    }
-                } else {
-                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
-                }
-            }
-            if (outputArray == null || outputArray.isEmpty()) {
-                log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
-                if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null) {
-                    throw new GFacHandlerException(
-                            "Empty Output returned from the Application, Double check the application"
-                                    + "and ApplicationDescriptor output Parameter Names");
-                }
-            }
-            jobExecutionContext.setStandardError(localStdErrFile.getAbsolutePath());
-            jobExecutionContext.setStandardOutput(localStdOutFile.getAbsolutePath());
-            jobExecutionContext.setOutputDir(outputDataDir);
-            status.setTransferState(TransferState.DOWNLOAD);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription(outputDataDir);
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-            experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-
-        } catch (Exception e) {
-            try {
-                status.setTransferState(TransferState.FAILED);
-                detail.setTransferStatus(status);
-                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-            } catch (Exception e1) {
-                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error in retrieving results", e);
-        }
-
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
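The stdout download above polls up to three times with a fixed two-second sleep. The same pattern can be factored into a small retry helper; this is a sketch assuming a caller-supplied action, not part of the GFac API:

    import java.util.concurrent.Callable;

    public final class RetrySketch {

        // Retries the action until it yields a non-empty string, up to maxAttempts,
        // sleeping between attempts; returns "" if every attempt fails.
        static String retryForNonEmpty(Callable<String> action, int maxAttempts, long sleepMillis)
                throws InterruptedException {
            for (int attempt = 1; attempt <= maxAttempts; attempt++) {
                try {
                    String result = action.call();
                    if (result != null && !result.isEmpty()) {
                        return result;
                    }
                } catch (Exception e) {
                    System.err.println("attempt " + attempt + " failed: " + e.getMessage());
                }
                Thread.sleep(sleepMillis);
            }
            return "";
        }

        public static void main(String[] args) throws InterruptedException {
            // Hypothetical stand-in for the scpFrom + readFileToString pair used above.
            System.out.println(retryForNonEmpty(() -> "job output", 3, 2000));
        }
    }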
+//        } catch (Exception e) {
+//            log.error(e.getMessage());
+//            try {
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//            } catch (GFacException e1) {
+//                log.error(e1.getLocalizedMessage());
+//            }
+//            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//        }
+//
+//        super.invoke(jobExecutionContext);
+//        DataTransferDetails detail = new DataTransferDetails();
+//        detail.setTransferDescription("Output data staging");
+//        TransferStatus status = new TransferStatus();
+//
+//        RemoteCluster remoteCluster = null;
+//        try {
+//             remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//            if (remoteCluster == null) {
+//                throw new GFacProviderException("Security context is not set properly");
+//            } else {
+//                log.info("Successfully retrieved the Security Context");
+//            }
+//
+//            // Fetch the stdout and stderr files
+//            String timeStampedExperimentID = GFacUtils.createUniqueNameWithDate(jobExecutionContext.getExperimentID());
+//
+//            TaskDetails taskData = jobExecutionContext.getTaskData();
+//            String outputDataDir = ServerSettings.getSetting(GFacConstants.OUTPUT_DATA_DIR, File.separator + "tmp");
+//            File localStdOutFile;
+//            File localStdErrFile;
+//            //FIXME: AdvancedOutput is a remote location; third-party transfer must work for this to function
+////            if (taskData.getAdvancedOutputDataHandling() != null) {
+////                outputDataDir = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
+////            }
+//            if (outputDataDir == null) {
+//                outputDataDir = File.separator + "tmp";
+//            }
+//            outputDataDir = outputDataDir + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID();
+//            (new File(outputDataDir)).mkdirs();
+//
+//
+//            localStdOutFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stdout");
+//            localStdErrFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stderr");
+////            remoteCluster.makeDirectory(outputDataDir);
+//            int i = 0;
+//            String stdOutStr = "";
+//            while (stdOutStr.isEmpty()) {
+//                try {
+//                    remoteCluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
+//                    stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
+//                } catch (Exception e) {
+//                    log.error(e.getLocalizedMessage());
+//                    Thread.sleep(2000);
+//                }
+//                i++;
+//                if (i == 3) break;
+//            }
+//            Thread.sleep(1000);
+//            remoteCluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
+//            Thread.sleep(1000);
+//
+//            String stdErrStr = GFacUtils.readFileToString(localStdErrFile.getAbsolutePath());
+//            status.setTransferState(TransferState.STDOUT_DOWNLOAD);
+//            detail.setTransferStatus(status);
+//            detail.setTransferDescription("STDOUT:" + localStdOutFile.getAbsolutePath());
+//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//
+//            status.setTransferState(TransferState.STDERROR_DOWNLOAD);
+//            detail.setTransferStatus(status);
+//            detail.setTransferDescription("STDERR:" + localStdErrFile.getAbsolutePath());
+//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//
+//
+//            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
+//            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
+//            Set<String> keys = output.keySet();
+//            for (String paramName : keys) {
+//                OutputDataObjectType actualParameter = (OutputDataObjectType) output.get(paramName);
+//                if (DataType.URI == actualParameter.getType()) {
+//                    List<String> outputList = null;
+//                    int retry = 3;
+//                    while (retry > 0) {
+//                        outputList = remoteCluster.listDirectory(jobExecutionContext.getOutputDir());
+//                        if (outputList.size() > 0) {
+//                            break;
+//                        }
+//                        retry--;
+//                        Thread.sleep(2000);
+//                    }
+//
+//                    if (outputList.size() == 0 || outputList.get(0).isEmpty() || outputList.size() > 1) {
+//                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
+//                        Set<String> strings = output.keySet();
+//                        outputArray.clear();
+//                        for (String key : strings) {
+//                            OutputDataObjectType actualParameter1 = (OutputDataObjectType) output.get(key);
+//                            if (DataType.URI == actualParameter1.getType()) {
+//                                String downloadFile = actualParameter1.getValue();
+//                                remoteCluster.scpFrom(downloadFile, outputDataDir);
+//                                String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
+//                                String localFile = outputDataDir + File.separator + fileName;
+//                                jobExecutionContext.addOutputFile(localFile);
+//                                actualParameter1.setValue(localFile);
+//                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                                dataObjectType.setValue(localFile);
+//                                dataObjectType.setName(key);
+//                                dataObjectType.setType(DataType.URI);
+//                                outputArray.add(dataObjectType);
+//                            } else if (DataType.STDOUT == actualParameter1.getType()) {
+//                                String fileName = localStdOutFile.getName();
+//                                String localFile = outputDataDir + File.separator + fileName;
+//                                jobExecutionContext.addOutputFile(localFile);
+//                                actualParameter1.setValue(localFile);
+//                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                                dataObjectType.setValue(localFile);
+//                                dataObjectType.setName(key);
+//                                dataObjectType.setType(DataType.STDOUT);
+//                                outputArray.add(dataObjectType);
+//                            } else if (DataType.STDERR == actualParameter1.getType()) {
+//                                String fileName = localStdErrFile.getName();
+//                                String localFile = outputDataDir + File.separator + fileName;
+//                                jobExecutionContext.addOutputFile(localFile);
+//                                actualParameter1.setValue(localFile);
+//                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                                dataObjectType.setValue(localFile);
+//                                dataObjectType.setName(key);
+//                                dataObjectType.setType(DataType.STDERR);
+//                                outputArray.add(dataObjectType);
+//                            }
+//                        }
+//                        break;
+//                    } else if (outputList.size() == 1) { //FIXME: Ultrascan case
+//                        String valueList = outputList.get(0);
+//                        remoteCluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
+//                        String outputPath = outputDataDir + File.separator + valueList;
+//                        jobExecutionContext.addOutputFile(outputPath);
+//                        actualParameter.setValue(outputPath);
+//                        OutputDataObjectType dataObjectType = new OutputDataObjectType();
+//                        dataObjectType.setValue(outputPath);
+//                        dataObjectType.setName(paramName);
+//                        dataObjectType.setType(DataType.URI);
+//                        outputArray.add(dataObjectType);
+//                    }
+//                } else {
+//                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
+//                }
+//            }
+//            if (outputArray == null || outputArray.isEmpty()) {
+//                log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
+//                if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null) {
+//                    throw new GFacHandlerException(
+//                            "Empty Output returned from the Application, Double check the application"
+//                                    + "and ApplicationDescriptor output Parameter Names");
+//                }
+//            }
+//            jobExecutionContext.setStandardError(localStdErrFile.getAbsolutePath());
+//            jobExecutionContext.setStandardOutput(localStdOutFile.getAbsolutePath());
+//            jobExecutionContext.setOutputDir(outputDataDir);
+//            status.setTransferState(TransferState.DOWNLOAD);
+//            detail.setTransferStatus(status);
+//            detail.setTransferDescription(outputDataDir);
+//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//            experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
+//
+//        } catch (Exception e) {
+//            try {
+//                status.setTransferState(TransferState.FAILED);
+//                detail.setTransferStatus(status);
+//                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
+//            } catch (Exception e1) {
+//                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
+//            }
+//            throw new GFacHandlerException("Error in retrieving results", e);
+//        }
+//
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        // TODO: Auto generated method body.
+//    }
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//
+//    }
+//}
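When a URI output is downloaded, the handler above derives the local file name from the remote path with substring(lastIndexOf(...) + 1). A self-contained sketch of that mapping (paths are examples only):

    import java.io.File;

    public final class OutputStagingSketch {

        // Maps a remote file path to its local path under the output data directory,
        // mirroring the file-name extraction used in SSHOutputHandler; remote paths
        // are assumed to be POSIX-style.
        static String localPathFor(String remoteFile, String outputDataDir) {
            String fileName = remoteFile.substring(remoteFile.lastIndexOf('/') + 1);
            return outputDataDir + File.separator + fileName;
        }

        public static void main(String[] args) {
            System.out.println(localPathFor("/scratch/job42/result.dat", "/tmp/exp42-task7"));
        }
    }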

http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
index b837997..bd8a0bc 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
@@ -1,473 +1,473 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ssh.provider.impl;
-
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.model.experiment.TaskState;
-import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.apache.airavata.common.exception.AiravataException;
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.LocalEventPublisher;
-import org.apache.airavata.gfac.core.GFacConstants;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.core.monitor.state.GfacExperimentStateChangeRequest;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.states.GfacExperimentState;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsi.ssh.api.CommandExecutor;
-import org.apache.airavata.gfac.impl.StandardOutReader;
-import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
-import org.apache.airavata.gfac.monitor.email.EmailMonitorFactory;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
-import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.model.experiment.JobDetails;
-import org.apache.airavata.model.experiment.JobState;
-import org.apache.xmlbeans.XmlException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
-
-import java.io.*;
-import java.util.*;
-
-/**
- * Execute application using remote SSH
- */
-public class SSHProvider extends AbstractProvider {
-    private static final Logger log = LoggerFactory.getLogger(SSHProvider.class);
-    private RemoteCluster remoteCluster;
-    private String jobID = null;
-    private String taskID = null;
-    // we keep gsisshprovider to support qsub submission in case of an HPC scenario with SSH
-    private boolean hpcType = false;
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        try {
-            super.initialize(jobExecutionContext);
-            String hostAddress = jobExecutionContext.getHostName();
-            ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
-            ResourceJobManagerType resourceJobManagerType = resourceJobManager.getResourceJobManagerType();
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-            }
-            taskID = jobExecutionContext.getTaskData().getTaskID();
-
-            JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
-            if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH && resourceJobManagerType == ResourceJobManagerType.FORK) {
-                jobID = "SSH_" + jobExecutionContext.getHostName() + "_" + Calendar.getInstance().getTimeInMillis();
-                remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-
-                String remoteFile = jobExecutionContext.getWorkingDir() + File.separatorChar + GFacConstants.EXECUTABLE_NAME;
-                details.setJobID(taskID);
-                details.setJobDescription(remoteFile);
-                jobExecutionContext.setJobDetails(details);
-                // FIXME : Why remoteCluster is passed as null
-                JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
-                details.setJobDescription(jobDescriptor.toXML());
-
-                GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SETUP);
-                log.info(remoteFile);
-                File runscript = createShellScript(jobExecutionContext);
-                remoteCluster.scpTo(remoteFile, runscript.getAbsolutePath());
-            } else {
-                hpcType = true;
-            }
-        } catch (ApplicationSettingsException e) {
-            log.error(e.getMessage());
-            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-        } catch (Exception e) {
-            throw new GFacProviderException(e.getLocalizedMessage(), e);
-        }
-    }
-
-
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        if (!hpcType) {
-            try {
-                /*
-                 * Execute
-                 */
-                String executable = jobExecutionContext.getWorkingDir() + File.separatorChar + GFacConstants.EXECUTABLE_NAME;
-                details.setJobDescription(executable);
-                RawCommandInfo rawCommandInfo = new RawCommandInfo("/bin/chmod 755 " + executable + "; " + executable);
-                StandardOutReader jobIDReaderCommandOutput = new StandardOutReader();
-                log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-                CommandExecutor.executeCommand(rawCommandInfo, remoteCluster.getSession(), jobIDReaderCommandOutput);
-                String stdOutputString = getOutputifAvailable(jobIDReaderCommandOutput, "Error submitting job to resource");
-                log.info("stdout=" + stdOutputString);
-            } catch (Exception e) {
-                throw new GFacProviderException(e.getMessage(), e);
-            }
-        } else {
-            try {
-                StringBuffer data = new StringBuffer();
-                JobDetails jobDetails = new JobDetails();
-                String hostAddress = jobExecutionContext.getHostName();
-                LocalEventPublisher localEventPublisher = jobExecutionContext.getLocalEventPublisher();
-                try {
-                    RemoteCluster remoteCluster = null;
-                    if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                        GFACSSHUtils.addSecurityContext(jobExecutionContext);
-                    }
-                    remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-                    if (remoteCluster == null) {
-                        throw new GFacProviderException("Security context is not set properly");
-                    } else {
-                        log.info("Successfully retrieved the Security Context");
-                    }
-                    // The installed path is a mandatory field, because it can change based on the computing resource
-                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
-                    jobDetails.setJobName(jobDescriptor.getJobName());
-                    log.info(jobDescriptor.toXML());
-                    jobDetails.setJobDescription(jobDescriptor.toXML());
-                    String jobID = remoteCluster.submitBatchJob(jobDescriptor);
-                    if (jobID != null && !jobID.isEmpty()) {
-                        jobDetails.setJobID(jobID);
-                        GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SUBMITTED);
-                        localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext),
-                                GfacExperimentState.JOBSUBMITTED));
-                        jobExecutionContext.setJobDetails(jobDetails);
-                        if (verifyJobSubmissionByJobId(remoteCluster, jobID)) {
-                            localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
-                                    , GfacExperimentState.JOBSUBMITTED));
-                            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.QUEUED);
-                        }
-                    } else {
-                        jobExecutionContext.setJobDetails(jobDetails);
-                        int verificationTryCount = 0;
-                        while (verificationTryCount++ < 3) {
-                            String verifyJobId = verifyJobSubmission(remoteCluster, jobDetails);
-                            if (verifyJobId != null && !verifyJobId.isEmpty()) {
-                                // JobStatus either changed from SUBMITTED to QUEUED or directly to QUEUED
-                                jobID = verifyJobId;
-                                jobDetails.setJobID(jobID);
-                                localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
-                                        , GfacExperimentState.JOBSUBMITTED));
-                                GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.QUEUED);
-                                break;
-                            }
-                            Thread.sleep(verificationTryCount * 1000);
-                        }
-                    }
-
-                    if (jobID == null || jobID.isEmpty()) {
-                        String msg = "expId:" + jobExecutionContext.getExperimentID() + " Couldn't find remote jobId for JobName:"
-                                + jobDetails.getJobName() + ", both submit and verify steps didn't return a valid JobId. Hence changing the experiment state to Failed";
-                        log.error(msg);
-                        GFacUtils.saveErrorDetails(jobExecutionContext, msg, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                        GFacUtils.publishTaskStatus(jobExecutionContext, localEventPublisher, TaskState.FAILED);
-                        return;
-                    }
-                    data.append("jobDesc=").append(jobDescriptor.toXML());
-                    data.append(",jobId=").append(jobDetails.getJobID());
-                    monitor(jobExecutionContext);
-                } catch (SSHApiException e) {
-                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-                    log.error(error);
-                    jobDetails.setJobID("none");
-                    GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-                    GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                    throw new GFacProviderException(error, e);
-                } catch (Exception e) {
-                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-                    log.error(error);
-                    jobDetails.setJobID("none");
-                    GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-                    GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                    throw new GFacProviderException(error, e);
-                } finally {
-                    log.info("Saving data for future recovery: ");
-                    log.info(data.toString());
-                    GFacUtils.saveHandlerData(jobExecutionContext, data, this.getClass().getName());
-                }
-            } catch (GFacException e) {
-                throw new GFacProviderException(e.getMessage(), e);
-            }
-        }
-    }
-
-    private boolean verifyJobSubmissionByJobId(RemoteCluster remoteCluster, String jobID) throws SSHApiException {
-        JobStatus status = remoteCluster.getJobStatus(jobID);
-        return status != null &&  status != JobStatus.U;
-    }
-
-    private String verifyJobSubmission(RemoteCluster remoteCluster, JobDetails jobDetails) {
-        String jobName = jobDetails.getJobName();
-        String jobId = null;
-        try {
-          jobId  = remoteCluster.getJobIdByJobName(jobName, remoteCluster.getServerInfo().getUserName());
-        } catch (SSHApiException e) {
-            log.error("Error while verifying JobId from JobName");
-        }
-        return jobId;
-    }
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-
-    }
-
-    public boolean cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        JobDetails jobDetails = jobExecutionContext.getJobDetails();
-        StringBuffer data = new StringBuffer();
-        String hostAddress = jobExecutionContext.getHostName();
-        if (!hpcType) {
-            throw new NotImplementedException();
-        } else {
-            RemoteCluster remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-            if (remoteCluster == null) {
-                throw new GFacProviderException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-            // The installed path is a mandatory field, because it can change based on the computing resource
-            if (jobDetails == null) {
-                log.error("There is not JobDetails, Cancel request can't be performed !!!");
-                return false;
-            }
-            try {
-                if (jobDetails.getJobID() != null) {
-                    if (remoteCluster.cancelJob(jobDetails.getJobID()) != null) {
-                        // if this operation success without any exceptions, we can assume cancel operation succeeded.
-                        GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
-                        return true;
-                    } else {
-                        log.info("Job Cancel operation failed");
-                    }
-                } else {
-                    log.error("No Job Id is set, so cannot perform the cancel operation !!!");
-                    throw new GFacProviderException("Cancel request failed to cancel job as JobId is null in Job Execution Context");
-                }
-            } catch (SSHApiException e) {
-                String error = "Cancel request failed " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-                log.error(error);
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//                throw new GFacProviderException(error, e);
-            } catch (Exception e) {
-                String error = "Cancel request failed " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-                log.error(error);
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//                throw new GFacProviderException(error, e);
-            }
-            return false;
-        }
-    }
-
-    private File createShellScript(JobExecutionContext context) throws IOException {
-        String uniqueDir = jobExecutionContext.getApplicationName() + System.currentTimeMillis()
-                + new Random().nextLong();
-
-        File shellScript = File.createTempFile(uniqueDir, "sh");
-        OutputStream out = new FileOutputStream(shellScript);
-
-        out.write("#!/bin/bash\n".getBytes());
-        out.write(("cd " + jobExecutionContext.getWorkingDir() + "\n").getBytes());
-        out.write(("export " + GFacConstants.INPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getInputDir() + "\n").getBytes());
-        out.write(("export " + GFacConstants.OUTPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getOutputDir() + "\n")
-                .getBytes());
-        // get the env of the host and the application
-        List<SetEnvPaths> envPathList = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getSetEnvironment();
-        for (SetEnvPaths setEnvPaths : envPathList) {
-            log.debug("Env[" + setEnvPaths.getName() + "] = " + setEnvPaths.getValue());
-            out.write(("export " + setEnvPaths.getName() + "=" + setEnvPaths.getValue() + "\n").getBytes());
-        }
-
-        // prepare the command
-        final String SPACE = " ";
-        StringBuffer cmd = new StringBuffer();
-        cmd.append(jobExecutionContext.getExecutablePath());
-        cmd.append(SPACE);
-
-        MessageContext input = context.getInMessageContext();
-        Map<String, Object> inputs = input.getParameters();
-        Set<String> keys = inputs.keySet();
-        for (String paramName : keys) {
-            InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
-            //if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-            if (inputParamType.getType() == DataType.URI) {
-                String value = inputParamType.getValue();
-                cmd.append(value);
-                cmd.append(SPACE);
-            } else {
-                String paramValue = inputParamType.getValue();
-                cmd.append(paramValue);
-                cmd.append(SPACE);
-            }
-        }
-        // We redirect stderr and stdout to remote files; they will be read
-        // back in later
-        cmd.append(SPACE);
-        cmd.append("1>");
-        cmd.append(SPACE);
-        cmd.append(jobExecutionContext.getStandardOutput());
-        cmd.append(SPACE);
-        cmd.append("2>");
-        cmd.append(SPACE);
-        cmd.append(jobExecutionContext.getStandardError());
-
-        String cmdStr = cmd.toString();
-        log.info("Command = " + cmdStr);
-        out.write((cmdStr + "\n").getBytes());
-        String message = "\"execuationSuceeded\"";
-        out.write(("echo " + message + "\n").getBytes());
-        out.close();
-
-        return shellScript;
-    }
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-
-    }
-
-    /**
-     * Reads the standard output and returns it if available; otherwise throws an
-     * SSHApiException that includes the standard error.
-     *
-     * @param jobIDReaderCommandOutput reader holding the command's stdout and stderr
-     * @param errorMsg message prefix used when raising the SSHApiException
-     * @return the standard output of the executed command
-     * @throws SSHApiException if stdout is empty or stderr is non-empty
-     */
-    private String getOutputifAvailable(StandardOutReader jobIDReaderCommandOutput, String errorMsg) throws SSHApiException {
-        String stdOutputString = jobIDReaderCommandOutput.getStdOutputString();
-        String stdErrorString = jobIDReaderCommandOutput.getStdErrorString();
-
-        if (stdOutputString == null || stdOutputString.isEmpty() || (stdErrorString != null && !stdErrorString.isEmpty())) {
-            log.error("Standard Error output : " + stdErrorString);
-            throw new SSHApiException(errorMsg + ": " + stdErrorString);
-        }
-        return stdOutputString;
-    }
-
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        // implements the logic to recover from a GFac failure
-        initialize(jobExecutionContext);
-        if(hpcType) {
-            log.info("Invoking Recovering for the Experiment: " + jobExecutionContext.getExperimentID());
-            String hostName = jobExecutionContext.getHostName();
-            String jobId = "";
-            String jobDesc = "";
-            String jobName = "";
-            try {
-                String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
-                String[] split = pluginData.split(",");
-                if (split.length < 2) {
-                    this.execute(jobExecutionContext);
-                    return;
-                }
-                jobDesc = split[0].substring(8);
-                jobId = split[1].substring(6);
-                try {
-                    JobDescriptor jobDescriptor = JobDescriptor.fromXML(jobDesc);
-                    jobName = jobDescriptor.getJobName();
-                } catch (XmlException e) {
-                    log.error(e.getMessage(), e);
-                    log.error("Cannot parse plugin data stored, but trying to recover");
-
-                }
-                log.info("Following data have recovered: ");
-                log.info("Job Description: " + jobDesc);
-                log.info("Job Id: " + jobId);
-                if (jobName.isEmpty() || jobId.isEmpty() || "none".equals(jobId) ||
-                        "".equals(jobId)) {
-                    log.info("Cannot recover data so submitting the job again !!!");
-                    this.execute(jobExecutionContext);
-                    return;
-                }
-            } catch (Exception e) {
-                log.error("Error while  recovering provider", e);
-            }
-            try {
-                // Now we have enough data to recover
-                JobDetails jobDetails = new JobDetails();
-                jobDetails.setJobDescription(jobDesc);
-                jobDetails.setJobID(jobId);
-                jobDetails.setJobName(jobName);
-                jobExecutionContext.setJobDetails(jobDetails);
-                if (jobExecutionContext.getSecurityContext(hostName) == null) {
-                    try {
-                        GFACSSHUtils.addSecurityContext(jobExecutionContext);
-                    } catch (ApplicationSettingsException e) {
-                        log.error(e.getMessage());
-                        throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-                    }
-                }
-                monitor(jobExecutionContext);
-            } catch (Exception e) {
-                log.error("Error while recover the job", e);
-                throw new GFacProviderException("Error delegating already ran job to Monitoring", e);
-            }
-        } else {
-            log.info("We do not handle non-HPC recovery, so we simply run the job directly");
-            this.execute(jobExecutionContext);
-        }
-    }
-
-    @Override
-    public void monitor(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        if (jobExecutionContext.getPreferredJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
-            String jobSubmissionInterfaceId = jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId();
-            SSHJobSubmission sshJobSubmission = null;
-            try {
-                sshJobSubmission = jobExecutionContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
-            } catch (AppCatalogException e) {
-                throw new GFacException("Error while reading compute resource", e);
-            }
-            MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
-            if (monitorMode != null && monitorMode == MonitorMode.JOB_EMAIL_NOTIFICATION_MONITOR) {
-                try {
-                    EmailBasedMonitor emailBasedMonitor = EmailMonitorFactory.getEmailBasedMonitor(
-                            sshJobSubmission.getResourceJobManager().getResourceJobManagerType());
-                    emailBasedMonitor.addToJobMonitorMap(jobExecutionContext);
-                } catch (AiravataException e) {
-                    throw new GFacHandlerException("Error while activating email job monitoring ", e);
-                }
-                return;
-            }
-        } else {
-            throw new IllegalArgumentException("Monitoring is implemented only for SSH, "
-                    + jobExecutionContext.getPreferredJobSubmissionProtocol().name() + " is not yet implemented");
-        }
-
-    }
-}
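createShellScript above writes a bash wrapper that changes into the working directory, exports the environment, runs the executable with its parameters, and redirects stdout/stderr to remote files. Note that File.createTempFile(uniqueDir, "sh") appends the suffix as-is, so the generated name ends in "...sh" with no dot. A standalone sketch of the same wrapper generation (method and parameter names here are illustrative, not the GFac context API):

    import java.io.IOException;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.List;
    import java.util.Map;

    public final class ShellScriptSketch {

        // Writes a bash wrapper: cd into the working dir, export the env,
        // run the executable with its arguments, redirect stdout and stderr.
        static Path writeWrapper(String workingDir, Map<String, String> env,
                                 String executable, List<String> args,
                                 String stdoutFile, String stderrFile) throws IOException {
            Path script = Files.createTempFile("run-", ".sh"); // ".sh" keeps the dot in the name
            try (Writer out = Files.newBufferedWriter(script, StandardCharsets.UTF_8)) {
                out.write("#!/bin/bash\n");
                out.write("cd " + workingDir + "\n");
                for (Map.Entry<String, String> e : env.entrySet()) {
                    out.write("export " + e.getKey() + "=" + e.getValue() + "\n");
                }
                out.write(executable + " " + String.join(" ", args)
                        + " 1> " + stdoutFile + " 2> " + stderrFile + "\n");
            }
            return script;
        }

        public static void main(String[] args) throws IOException {
            // Illustrative values standing in for the job execution context.
            Path script = writeWrapper("/tmp", Map.of("INPUT_DATA_DIR", "/tmp/in"),
                    "/bin/echo", List.of("hello"), "/tmp/stdout.txt", "/tmp/stderr.txt");
            System.out.println("wrote " + script);
        }
    }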
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.gfac.ssh.provider.impl;
+//
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.model.experiment.TaskState;
+//import org.apache.airavata.registry.cpi.AppCatalogException;
+//import org.apache.airavata.common.exception.AiravataException;
+//import org.apache.airavata.common.exception.ApplicationSettingsException;
+//import org.apache.airavata.common.utils.LocalEventPublisher;
+//import org.apache.airavata.gfac.core.GFacConstants;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.JobDescriptor;
+//import org.apache.airavata.gfac.core.SSHApiException;
+//import org.apache.airavata.gfac.core.cluster.JobStatus;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.monitor.MonitorID;
+//import org.apache.airavata.gfac.core.monitor.state.GfacExperimentStateChangeRequest;
+//import org.apache.airavata.gfac.core.provider.AbstractProvider;
+//import org.apache.airavata.gfac.core.provider.GFacProviderException;
+//import org.apache.airavata.gfac.core.states.GfacExperimentState;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.gsi.ssh.api.CommandExecutor;
+//import org.apache.airavata.gfac.impl.StandardOutReader;
+//import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
+//import org.apache.airavata.gfac.monitor.email.EmailMonitorFactory;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
+//import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+//import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+//import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
+//import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
+//import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
+//import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+//import org.apache.airavata.model.experiment.CorrectiveAction;
+//import org.apache.airavata.model.experiment.ErrorCategory;
+//import org.apache.airavata.model.experiment.JobDetails;
+//import org.apache.airavata.model.experiment.JobState;
+//import org.apache.xmlbeans.XmlException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//import sun.reflect.generics.reflectiveObjects.NotImplementedException;
+//
+//import java.io.*;
+//import java.util.*;
+//
+///**
+// * Execute application using remote SSH
+// */
+//public class SSHProvider extends AbstractProvider {
+//    private static final Logger log = LoggerFactory.getLogger(SSHProvider.class);
+//    private RemoteCluster remoteCluster;
+//    private String jobID = null;
+//    private String taskID = null;
+//    // we keep gsisshprovider to support qsub submission in case of an HPC scenario with SSH
+//    private boolean hpcType = false;
+//
+//    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
+//        try {
+//            super.initialize(jobExecutionContext);
+//            String hostAddress = jobExecutionContext.getHostName();
+//            ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
+//            ResourceJobManagerType resourceJobManagerType = resourceJobManager.getResourceJobManagerType();
+//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//                GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//            }
+//            taskID = jobExecutionContext.getTaskData().getTaskID();
+//
+//            JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+//            if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH && resourceJobManagerType == ResourceJobManagerType.FORK) {
+//                jobID = "SSH_" + jobExecutionContext.getHostName() + "_" + Calendar.getInstance().getTimeInMillis();
+//                remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//
+//                String remoteFile = jobExecutionContext.getWorkingDir() + File.separatorChar + GFacConstants.EXECUTABLE_NAME;
+//                details.setJobID(taskID);
+//                details.setJobDescription(remoteFile);
+//                jobExecutionContext.setJobDetails(details);
+//                // FIXME : Why remoteCluster is passed as null
+//                JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
+//                details.setJobDescription(jobDescriptor.toXML());
+//
+//                GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SETUP);
+//                log.info(remoteFile);
+//                File runscript = createShellScript(jobExecutionContext);
+//                remoteCluster.scpTo(remoteFile, runscript.getAbsolutePath());
+//            } else {
+//                hpcType = true;
+//            }
+//        } catch (ApplicationSettingsException e) {
+//            log.error(e.getMessage());
+//            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//        } catch (Exception e) {
+//            throw new GFacProviderException(e.getLocalizedMessage(), e);
+//        }
+//    }
+//
+//
+//    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+//        if (!hpcType) {
+//            try {
+//                /*
+//                 * Execute
+//                 */
+//                String executable = jobExecutionContext.getWorkingDir() + File.separatorChar + GFacConstants.EXECUTABLE_NAME;
+//                details.setJobDescription(executable);
+//                RawCommandInfo rawCommandInfo = new RawCommandInfo("/bin/chmod 755 " + executable + "; " + executable);
+//                StandardOutReader jobIDReaderCommandOutput = new StandardOutReader();
+//                log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
+//                CommandExecutor.executeCommand(rawCommandInfo, remoteCluster.getSession(), jobIDReaderCommandOutput);
+//                String stdOutputString = getOutputifAvailable(jobIDReaderCommandOutput, "Error submitting job to resource");
+//                log.info("stdout=" + stdOutputString);
+//            } catch (Exception e) {
+//                throw new GFacProviderException(e.getMessage(), e);
+//            }
+//        } else {
+//            try {
+//                StringBuffer data = new StringBuffer();
+//                JobDetails jobDetails = new JobDetails();
+//                String hostAddress = jobExecutionContext.getHostName();
+//                LocalEventPublisher localEventPublisher = jobExecutionContext.getLocalEventPublisher();
+//                try {
+//                    RemoteCluster remoteCluster = null;
+//                    if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
+//                        GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//                    }
+//                    remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//                    if (remoteCluster == null) {
+//                        throw new GFacProviderException("Security context is not set properly");
+//                    } else {
+//                        log.info("Successfully retrieved the Security Context");
+//                    }
+//                    // This installed path is a mandatory field, because it can change based on the computing resource
+//                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
+//                    jobDetails.setJobName(jobDescriptor.getJobName());
+//                    log.info(jobDescriptor.toXML());
+//                    jobDetails.setJobDescription(jobDescriptor.toXML());
+//                    String jobID = remoteCluster.submitBatchJob(jobDescriptor);
+//                    if (jobID != null && !jobID.isEmpty()) {
+//                        jobDetails.setJobID(jobID);
+//                        GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SUBMITTED);
+//                        localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
+//                                , GfacExperimentState.JOBSUBMITTED));
+//                        jobExecutionContext.setJobDetails(jobDetails);
+//                        if (verifyJobSubmissionByJobId(remoteCluster, jobID)) {
+//                            localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
+//                                    , GfacExperimentState.JOBSUBMITTED));
+//                            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.QUEUED);
+//                        }
+//                    } else {
+//                        jobExecutionContext.setJobDetails(jobDetails);
+//                        int verificationTryCount = 0;
+//                        while (verificationTryCount++ < 3) {
+//                            String verifyJobId = verifyJobSubmission(remoteCluster, jobDetails);
+//                            if (verifyJobId != null && !verifyJobId.isEmpty()) {
+//                                // JobStatus either changed from SUBMITTED to QUEUED or directly to QUEUED
+//                                jobID = verifyJobId;
+//                                jobDetails.setJobID(jobID);
+//                                localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
+//                                        , GfacExperimentState.JOBSUBMITTED));
+//                                GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.QUEUED);
+//                                break;
+//                            }
+//                            Thread.sleep(verificationTryCount * 1000);
+//                        }
+//                    }
+//
+//                    if (jobID == null || jobID.isEmpty()) {
+//                        String msg = "expId:" + jobExecutionContext.getExperimentID() + " Couldn't find remote jobId for JobName:"
+//                                + jobDetails.getJobName() + "; neither the submit nor the verify step returned a valid JobId. Hence changing experiment state to Failed";
+//                        log.error(msg);
+//                        GFacUtils.saveErrorDetails(jobExecutionContext, msg, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//                        GFacUtils.publishTaskStatus(jobExecutionContext, localEventPublisher, TaskState.FAILED);
+//                        return;
+//                    }
+//                    data.append("jobDesc=").append(jobDescriptor.toXML());
+//                    data.append(",jobId=").append(jobDetails.getJobID());
+//                    monitor(jobExecutionContext);
+//                } catch (SSHApiException e) {
+//                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
+//                    log.error(error);
+//                    jobDetails.setJobID("none");
+//                    GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
+//                    GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//                    throw new GFacProviderException(error, e);
+//                } catch (Exception e) {
+//                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
+//                    log.error(error);
+//                    jobDetails.setJobID("none");
+//                    GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
+//                    GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//                    throw new GFacProviderException(error, e);
+//                } finally {
+//                    log.info("Saving data for future recovery: ");
+//                    log.info(data.toString());
+//                    GFacUtils.saveHandlerData(jobExecutionContext, data, this.getClass().getName());
+//                }
+//            } catch (GFacException e) {
+//                throw new GFacProviderException(e.getMessage(), e);
+//            }
+//        }
+//    }
+//
+//    private boolean verifyJobSubmissionByJobId(RemoteCluster remoteCluster, String jobID) throws SSHApiException {
+//        JobStatus status = remoteCluster.getJobStatus(jobID);
+//        return status != null &&  status != JobStatus.U;
+//    }
+//
+//    private String verifyJobSubmission(RemoteCluster remoteCluster, JobDetails jobDetails) {
+//        String jobName = jobDetails.getJobName();
+//        String jobId = null;
+//        try {
+//            jobId = remoteCluster.getJobIdByJobName(jobName, remoteCluster.getServerInfo().getUserName());
+//        } catch (SSHApiException e) {
+//            log.error("Error while verifying JobId from JobName");
+//        }
+//        return jobId;
+//    }
+//
+//    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+//
+//    }
+//
+//    public boolean cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
+//        JobDetails jobDetails = jobExecutionContext.getJobDetails();
+//        StringBuffer data = new StringBuffer();
+//        String hostAddress = jobExecutionContext.getHostName();
+//        if (!hpcType) {
+//            throw new NotImplementedException();
+//        } else {
+//            RemoteCluster remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+//            if (remoteCluster == null) {
+//                throw new GFacProviderException("Security context is not set properly");
+//            } else {
+//                log.info("Successfully retrieved the Security Context");
+//            }
+//            // This installed path is a mandatory field, because it can change based on the computing resource
+//            if (jobDetails == null) {
+//                log.error("There are no JobDetails, so the cancel request can't be performed");
+//                return false;
+//            }
+//            try {
+//                if (jobDetails.getJobID() != null) {
+//                    if (remoteCluster.cancelJob(jobDetails.getJobID()) != null) {
+//                        // if this operation succeeds without any exceptions, we can assume the cancel operation succeeded.
+//                        GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
+//                        return true;
+//                    } else {
+//                        log.info("Job Cancel operation failed");
+//                    }
+//                } else {
+//                    log.error("No Job Id is set, so the cancel operation cannot be performed");
+//                    throw new GFacProviderException("Cancel request failed to cancel job as JobId is null in Job Execution Context");
+//                }
+//            } catch (SSHApiException e) {
+//                String error = "Cancel request failed " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
+//                log.error(error);
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+////                throw new GFacProviderException(error, e);
+//            } catch (Exception e) {
+//                String error = "Cancel request failed " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
+//                log.error(error);
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+////                throw new GFacProviderException(error, e);
+//            }
+//            return false;
+//        }
+//    }
+//
+//    private File createShellScript(JobExecutionContext context) throws IOException {
+//        String uniqueDir = jobExecutionContext.getApplicationName() + System.currentTimeMillis()
+//                + new Random().nextLong();
+//
+//        File shellScript = File.createTempFile(uniqueDir, ".sh");
+//        OutputStream out = new FileOutputStream(shellScript);
+//
+//        out.write("#!/bin/bash\n".getBytes());
+//        out.write(("cd " + jobExecutionContext.getWorkingDir() + "\n").getBytes());
+//        out.write(("export " + GFacConstants.INPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getInputDir() + "\n").getBytes());
+//        out.write(("export " + GFacConstants.OUTPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getOutputDir() + "\n")
+//                .getBytes());
+//        // get the env of the host and the application
+//        List<SetEnvPaths> envPathList = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getSetEnvironment();
+//        for (SetEnvPaths setEnvPaths : envPathList) {
+//            log.debug("Env[" + setEnvPaths.getName() + "] = " + setEnvPaths.getValue());
+//            out.write(("export " + setEnvPaths.getName() + "=" + setEnvPaths.getValue() + "\n").getBytes());
+//        }
+//
+//        // prepare the command
+//        final String SPACE = " ";
+//        StringBuffer cmd = new StringBuffer();
+//        cmd.append(jobExecutionContext.getExecutablePath());
+//        cmd.append(SPACE);
+//
+//        MessageContext input = context.getInMessageContext();
+//        Map<String, Object> inputs = input.getParameters();
+//        Set<String> keys = inputs.keySet();
+//        for (String paramName : keys) {
+//            InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
+//            //if ("URIArray".equals(actualParameter.getType().getType().toString())) {
+//            if (inputParamType.getType() == DataType.URI) {
+//                String value = inputParamType.getValue();
+//                cmd.append(value);
+//                cmd.append(SPACE);
+//            } else {
+//                String paramValue = inputParamType.getValue();
+//                cmd.append(paramValue);
+//                cmd.append(SPACE);
+//            }
+//        }
+//        // We redirect stderr and stdout to remote files; they will be read
+//        // in later.
+//        cmd.append(SPACE);
+//        cmd.append("1>");
+//        cmd.append(SPACE);
+//        cmd.append(jobExecutionContext.getStandardOutput());
+//        cmd.append(SPACE);
+//        cmd.append("2>");
+//        cmd.append(SPACE);
+//        cmd.append(jobExecutionContext.getStandardError());
+//
+//        String cmdStr = cmd.toString();
+//        log.info("Command = " + cmdStr);
+//        out.write((cmdStr + "\n").getBytes());
+//        String message = "\"execuationSuceeded\"";
+//        out.write(("echo " + message + "\n").getBytes());
+//        out.close();
+//
+//        return shellScript;
+//    }
+//
+//    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
+//
+//    }
+//
+//    /**
+//     * Reads the standard output of the submission command and returns it if available.
+//     *
+//     * @param jobIDReaderCommandOutput reader holding the command's stdout and stderr
+//     * @param errorMsg message used when stdout is missing or stderr is non-empty
+//     * @return the standard output string
+//     * @throws SSHApiException if there is no stdout or stderr is non-empty
+//     */
+//    private String getOutputifAvailable(StandardOutReader jobIDReaderCommandOutput, String errorMsg) throws SSHApiException {
+//        String stdOutputString = jobIDReaderCommandOutput.getStdOutputString();
+//        String stdErrorString = jobIDReaderCommandOutput.getStdErrorString();
+//
+//        if (stdOutputString == null || stdOutputString.isEmpty() || (stdErrorString != null && !stdErrorString.isEmpty())) {
+//            log.error("Standard Error output : " + stdErrorString);
+//            throw new SSHApiException(errorMsg + stdErrorString);
+//        }
+//        return stdOutputString;
+//    }
+//
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
+//        // have to implement the logic to recover a gfac failure
+//        initialize(jobExecutionContext);
+//        if(hpcType) {
+//            log.info("Invoking recovery for the experiment: " + jobExecutionContext.getExperimentID());
+//            String hostName = jobExecutionContext.getHostName();
+//            String jobId = "";
+//            String jobDesc = "";
+//            String jobName = "";
+//            try {
+//                String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
+//                String[] split = pluginData.split(",");
+//                if (split.length < 2) {
+//                    this.execute(jobExecutionContext);
+//                    return;
+//                }
+//                jobDesc = split[0].substring(8);
+//                jobId = split[1].substring(6);
+//                try {
+//                    JobDescriptor jobDescriptor = JobDescriptor.fromXML(jobDesc);
+//                    jobName = jobDescriptor.getJobName();
+//                } catch (XmlException e) {
+//                    log.error(e.getMessage(), e);
+//                    log.error("Cannot parse stored plugin data, but trying to recover");
+//
+//                }
+//                log.info("The following data has been recovered:");
+//                log.info("Job Description: " + jobDesc);
+//                log.info("Job Id: " + jobId);
+//                if (jobName.isEmpty() || jobId.isEmpty() || "none".equals(jobId)) {
+//                    log.info("Cannot recover data, so submitting the job again.");
+//                    this.execute(jobExecutionContext);
+//                    return;
+//                }
+//            } catch (Exception e) {
+//                log.error("Error while recovering provider", e);
+//            }
+//            try {
+//                // Now we have enough data to recover
+//                JobDetails jobDetails = new JobDetails();
+//                jobDetails.setJobDescription(jobDesc);
+//                jobDetails.setJobID(jobId);
+//                jobDetails.setJobName(jobName);
+//                jobExecutionContext.setJobDetails(jobDetails);
+//                if (jobExecutionContext.getSecurityContext(hostName) == null) {
+//                    try {
+//                        GFACSSHUtils.addSecurityContext(jobExecutionContext);
+//                    } catch (ApplicationSettingsException e) {
+//                        log.error(e.getMessage());
+//                        throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+//                    }
+//                }
+//                monitor(jobExecutionContext);
+//            } catch (Exception e) {
+//                log.error("Error while recovering the job", e);
+//                throw new GFacProviderException("Error delegating an already-run job to monitoring", e);
+//            }
+//        } else {
+//            log.info("We do not handle non-HPC recovery, so we simply run the job directly");
+//            this.execute(jobExecutionContext);
+//        }
+//    }
+//
+//    @Override
+//    public void monitor(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
+//        if (jobExecutionContext.getPreferredJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
+//            String jobSubmissionInterfaceId = jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId();
+//            SSHJobSubmission sshJobSubmission = null;
+//            try {
+//                sshJobSubmission = jobExecutionContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
+//            } catch (AppCatalogException e) {
+//                throw new GFacException("Error while reading compute resource", e);
+//            }
+//            MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
+//            if (monitorMode != null && monitorMode == MonitorMode.JOB_EMAIL_NOTIFICATION_MONITOR) {
+//                try {
+//                    EmailBasedMonitor emailBasedMonitor = EmailMonitorFactory.getEmailBasedMonitor(
+//                            sshJobSubmission.getResourceJobManager().getResourceJobManagerType());
+//                    emailBasedMonitor.addToJobMonitorMap(jobExecutionContext);
+//                } catch (AiravataException e) {
+//                    throw new GFacHandlerException("Error while activating email job monitoring ", e);
+//                }
+//                return;
+//            }
+//        } else {
+//            throw new IllegalArgumentException("Monitoring is implemented only for SSH; "
+//                    + jobExecutionContext.getPreferredJobSubmissionProtocol().name() + " is not yet supported");
+//        }
+//
+//    }
+//}

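For readers tracing the commented-out SSHProvider above: execute() follows a submit-and-verify pattern. If submitBatchJob() returns no id, the provider retries a lookup by job name, sleeping tryCount seconds between attempts, and only marks the experiment FAILED when every attempt comes back empty. A minimal, self-contained Java sketch of that retry shape (hypothetical names; not the Airavata API):

    import java.util.Optional;

    public class SubmitVerifySketch {
        // Hypothetical stand-in for remoteCluster.getJobIdByJobName(jobName, userName).
        interface JobLookup {
            Optional<String> findJobIdByName(String jobName) throws Exception;
        }

        // Retry the lookup up to maxTries times with a linearly growing sleep,
        // mirroring the verification loop in the provider above.
        static Optional<String> verifyWithBackoff(JobLookup lookup, String jobName, int maxTries)
                throws InterruptedException {
            for (int tryCount = 1; tryCount <= maxTries; tryCount++) {
                try {
                    Optional<String> id = lookup.findJobIdByName(jobName);
                    if (id.isPresent() && !id.get().isEmpty()) {
                        return id; // job is visible in the queue; the caller records it as QUEUED
                    }
                } catch (Exception e) {
                    // a failed lookup is treated the same as "not found yet"
                }
                Thread.sleep(tryCount * 1000L); // linear back-off, as in the original loop
            }
            return Optional.empty(); // caller marks the experiment FAILED
        }
    }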

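The non-HPC path instead generates a bash wrapper in createShellScript(): cd into the working directory, export the input/output directory variables and every SetEnvPaths entry, then run the executable with its arguments, redirecting stdout and stderr to the configured files. A compact sketch of that assembly (hypothetical helper; the provider's own version appears above):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.List;
    import java.util.Map;

    public class WrapperScriptSketch {
        // Build a bash wrapper of the same shape the provider copies to the remote host.
        static Path writeWrapper(String workingDir, Map<String, String> env, String executable,
                                 List<String> args, String stdout, String stderr) throws IOException {
            StringBuilder sb = new StringBuilder("#!/bin/bash\n");
            sb.append("cd ").append(workingDir).append('\n');
            for (Map.Entry<String, String> e : env.entrySet()) {
                sb.append("export ").append(e.getKey()).append('=').append(e.getValue()).append('\n');
            }
            sb.append(executable);
            for (String arg : args) {
                sb.append(' ').append(arg);
            }
            // redirect stdout/stderr so they can be read back after the run
            sb.append(" 1> ").append(stdout).append(" 2> ").append(stderr).append('\n');
            Path script = Files.createTempFile("run", ".sh");
            Files.writeString(script, sb.toString());
            return script;
        }
    }
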
[4/5] airavata git commit: fixing compilation issues

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalInputHandler.java
index 884ccd5..21991fd 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalInputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/handler/LocalInputHandler.java
@@ -1,92 +1,92 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.local.handler;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.commons.io.FileUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-import java.util.Properties;
-
-
-public class LocalInputHandler extends AbstractHandler {
-    private static final Logger logger = LoggerFactory.getLogger(LocalInputHandler.class);
-    @Override
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        super.invoke(jobExecutionContext);
-        Map<String, Object> inputParameters = jobExecutionContext.getInMessageContext().getParameters();
-        for (Map.Entry<String, Object> inputParamEntry : inputParameters.entrySet()) {
-            if (inputParamEntry.getValue() instanceof InputDataObjectType) {
-                InputDataObjectType inputDataObject = (InputDataObjectType) inputParamEntry.getValue();
-                if (inputDataObject != null
-                        && inputDataObject.getType() == DataType.URI
-                        && !inputDataObject.getValue().equals("")) {
-                    try {
-                        inputDataObject.setValue(stageFile(jobExecutionContext.getInputDir(), inputDataObject.getValue()));
-                    } catch (IOException e) {
-                        throw new GFacHandlerException("Error while staging source file: " + inputDataObject.getValue());
-                    }
-                }
-            }
-        }
-    }
-
-    private String stageFile(String inputDir, String sourceFilePath) throws IOException {
-        int i = sourceFilePath.lastIndexOf(File.separator);
-        String substring = sourceFilePath.substring(i + 1);
-        if (inputDir.endsWith("/")) {
-            inputDir = inputDir.substring(0, inputDir.length() - 1);
-        }
-        String targetFilePath = inputDir + File.separator + substring;
-
-        if (sourceFilePath.startsWith("file")) {
-            sourceFilePath = sourceFilePath.substring(sourceFilePath.indexOf(":") + 1, sourceFilePath.length());
-        }
-
-        File sourceFile = new File(sourceFilePath);
-        File targetFile = new File(targetFilePath);
-        if (targetFile.exists()) {
-            targetFile.delete();
-        }
-        logger.info("staging source file : " + sourceFilePath + " to target file : " + targetFilePath);
-        FileUtils.copyFile(sourceFile, targetFile);
-
-        return targetFilePath;
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-
-    }
-
-    @Override
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.local.handler;
+//
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+//import org.apache.commons.io.FileUtils;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.io.IOException;
+//import java.util.Map;
+//import java.util.Properties;
+//
+//
+//public class LocalInputHandler extends AbstractHandler {
+//    private static final Logger logger = LoggerFactory.getLogger(LocalInputHandler.class);
+//    @Override
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        super.invoke(jobExecutionContext);
+//        Map<String, Object> inputParameters = jobExecutionContext.getInMessageContext().getParameters();
+//        for (Map.Entry<String, Object> inputParamEntry : inputParameters.entrySet()) {
+//            if (inputParamEntry.getValue() instanceof InputDataObjectType) {
+//                InputDataObjectType inputDataObject = (InputDataObjectType) inputParamEntry.getValue();
+//                if (inputDataObject != null
+//                        && inputDataObject.getType() == DataType.URI
+//                        && !inputDataObject.getValue().equals("")) {
+//                    try {
+//                        inputDataObject.setValue(stageFile(jobExecutionContext.getInputDir(), inputDataObject.getValue()));
+//                    } catch (IOException e) {
+//                        throw new GFacHandlerException("Error while staging source file: " + inputDataObject.getValue());
+//                    }
+//                }
+//            }
+//        }
+//    }
+//
+//    private String stageFile(String inputDir, String sourceFilePath) throws IOException {
+//        int i = sourceFilePath.lastIndexOf(File.separator);
+//        String substring = sourceFilePath.substring(i + 1);
+//        if (inputDir.endsWith("/")) {
+//            inputDir = inputDir.substring(0, inputDir.length() - 1);
+//        }
+//        String targetFilePath = inputDir + File.separator + substring;
+//
+//        if (sourceFilePath.startsWith("file")) {
+//            sourceFilePath = sourceFilePath.substring(sourceFilePath.indexOf(":") + 1, sourceFilePath.length());
+//        }
+//
+//        File sourceFile = new File(sourceFilePath);
+//        File targetFile = new File(targetFilePath);
+//        if (targetFile.exists()) {
+//            targetFile.delete();
+//        }
+//        logger.info("staging source file : " + sourceFilePath + " to target file : " + targetFilePath);
+//        FileUtils.copyFile(sourceFile, targetFile);
+//
+//        return targetFilePath;
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//
+//    }
+//
+//    @Override
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//
+//    }
+//}

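The staging step in the commented-out LocalInputHandler boils down to: strip an optional file: prefix, derive the target path from the input directory plus the source file name, and copy over any previous copy. A standalone sketch of that logic with java.nio (hypothetical helper, not the handler itself):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;

    public class StageFileSketch {
        // Copy sourcePath into inputDir and return the staged path.
        // Assumes inputDir is a non-empty directory path.
        static String stageFile(String inputDir, String sourcePath) throws IOException {
            if (sourcePath.startsWith("file:")) {
                sourcePath = sourcePath.substring("file:".length()); // tolerate file: URIs
            }
            Path source = Paths.get(sourcePath);
            Path target = Paths.get(inputDir).resolve(source.getFileName());
            Files.createDirectories(target.getParent()); // make sure the input dir exists
            Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING); // replace any stale copy
            return target.toString();
        }
    }
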
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
index 5bd75e5..2ea6518 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
@@ -1,309 +1,309 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.local.provider.impl;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-import org.apache.airavata.gfac.core.GFacConstants;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.impl.OutputUtils;
-import org.apache.airavata.gfac.local.utils.InputStreamToFileWriter;
-import org.apache.airavata.gfac.local.utils.InputUtils;
-import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
-import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.messaging.event.JobIdentifier;
-import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-import org.apache.airavata.model.messaging.event.TaskIdentifier;
-import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
-import org.apache.airavata.model.experiment.JobDetails;
-import org.apache.airavata.model.experiment.JobState;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
-import org.apache.xmlbeans.XmlException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
-
-public class LocalProvider extends AbstractProvider {
-    private static final Logger log = LoggerFactory.getLogger(LocalProvider.class);
-    private ProcessBuilder builder;
-    private List<String> cmdList;
-    private String jobId;
-    
-    public static class LocalProviderJobData{
-    	private String applicationName;
-    	private List<String> inputParameters;
-    	private String workingDir;
-    	private String inputDir;
-    	private String outputDir;
-		public String getApplicationName() {
-			return applicationName;
-		}
-		public void setApplicationName(String applicationName) {
-			this.applicationName = applicationName;
-		}
-		public List<String> getInputParameters() {
-			return inputParameters;
-		}
-		public void setInputParameters(List<String> inputParameters) {
-			this.inputParameters = inputParameters;
-		}
-		public String getWorkingDir() {
-			return workingDir;
-		}
-		public void setWorkingDir(String workingDir) {
-			this.workingDir = workingDir;
-		}
-		public String getInputDir() {
-			return inputDir;
-		}
-		public void setInputDir(String inputDir) {
-			this.inputDir = inputDir;
-		}
-		public String getOutputDir() {
-			return outputDir;
-		}
-		public void setOutputDir(String outputDir) {
-			this.outputDir = outputDir;
-		}
-    }
-    public LocalProvider(){
-        cmdList = new ArrayList<String>();
-    }
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
-    	super.initialize(jobExecutionContext);
-
-        // build command with all inputs
-        buildCommand();
-        initProcessBuilder(jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription());
-
-        // extra environment variables
-        builder.environment().put(GFacConstants.INPUT_DATA_DIR_VAR_NAME, jobExecutionContext.getInputDir());
-        builder.environment().put(GFacConstants.OUTPUT_DATA_DIR_VAR_NAME, jobExecutionContext.getOutputDir());
-
-        // set working directory
-        builder.directory(new File(jobExecutionContext.getWorkingDir()));
-
-        // log info
-        log.info("Command = " + InputUtils.buildCommand(cmdList));
-        log.info("Working dir = " + builder.directory());
-        /*for (String key : builder.environment().keySet()) {
-            log.info("Env[" + key + "] = " + builder.environment().get(key));
-        }*/
-    }
-
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        JobDetails jobDetails = new JobDetails();
-        try {
-        	jobId = jobExecutionContext.getTaskData().getTaskID();
-            jobDetails.setJobID(jobId);
-            jobDetails.setJobDescription(jobExecutionContext.getApplicationContext()
-                    .getApplicationDeploymentDescription().getAppDeploymentDescription());
-            jobExecutionContext.setJobDetails(jobDetails);
-            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SETUP);
-            // running cmd
-            Process process = builder.start();
-
-            Thread standardOutWriter = new InputStreamToFileWriter(process.getInputStream(), jobExecutionContext.getStandardOutput());
-            Thread standardErrorWriter = new InputStreamToFileWriter(process.getErrorStream(), jobExecutionContext.getStandardError());
-
-            // start output threads
-            standardOutWriter.setDaemon(true);
-            standardErrorWriter.setDaemon(true);
-            standardOutWriter.start();
-            standardErrorWriter.start();
-
-            int returnValue = process.waitFor();
-
-            // make sure other two threads are done
-            standardOutWriter.join();
-            standardErrorWriter.join();
-
-            /*
-             * Check the return value. It is usually not very helpful to draw conclusions from return values,
-             * so we just log a warning instead.
-             */
-            if (returnValue != 0) {
-                log.error("Process finished with a non-zero return value. Process may have failed");
-            } else {
-                log.info("Process finished with return value of zero.");
-            }
-
-            StringBuffer buf = new StringBuffer();
-            buf.append("Executed ").append(InputUtils.buildCommand(cmdList))
-                    .append(" on the localHost, working directory = ").append(jobExecutionContext.getWorkingDir())
-                    .append(" tempDirectory = ").append(jobExecutionContext.getScratchLocation()).append(" with status ")
-                    .append(String.valueOf(returnValue));
-
-            log.info(buf.toString());
-
-            // updating the job status to complete because there's nothing to monitor in local jobs
-//            MonitorID monitorID = createMonitorID(jobExecutionContext);
-            JobIdentifier jobIdentity = new JobIdentifier(jobExecutionContext.getJobDetails().getJobID(),
-                    jobExecutionContext.getTaskData().getTaskID(),
-                    jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
-                    jobExecutionContext.getExperimentID(),
-                    jobExecutionContext.getGatewayID());
-            jobExecutionContext.getLocalEventPublisher().publish(new JobStatusChangeEvent(JobState.COMPLETE, jobIdentity));
-        } catch (IOException io) {
-            throw new GFacProviderException(io.getMessage(), io);
-        } catch (InterruptedException e) {
-            throw new GFacProviderException(e.getMessage(), e);
-        } catch (GFacException e) {
-            throw new GFacProviderException(e.getMessage(), e);
-        }
-    }
-
-//	private MonitorID createMonitorID(JobExecutionContext jobExecutionContext) {
-//		MonitorID monitorID = new MonitorID(jobExecutionContext.getApplicationContext().getHostDescription(), jobId,
-//		        jobExecutionContext.getTaskData().getTaskID(),
-//		        jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(), jobExecutionContext.getExperimentID(),
-//		        jobExecutionContext.getExperiment().getUserName(),jobId);
-//		return monitorID;
-//	}
-
-//	private void saveApplicationJob(JobExecutionContext jobExecutionContext)
-//			throws GFacProviderException {
-//		ApplicationDeploymentDescriptionType app = jobExecutionContext.
-//                getApplicationContext().getApplicationDeploymentDescription().getType();
-//		ApplicationJob appJob = GFacUtils.createApplicationJob(jobExecutionContext);
-//		appJob.setJobId(jobId);
-//		LocalProviderJobData data = new LocalProviderJobData();
-//		data.setApplicationName(app.getExecutableLocation());
-//		data.setInputDir(app.getInputDataDirectory());
-//		data.setOutputDir(app.getOutputDataDirectory());
-//		data.setWorkingDir(builder.directory().toString());
-//		data.setInputParameters(ProviderUtils.getInputParameters(jobExecutionContext));
-//		ByteArrayOutputStream stream = new ByteArrayOutputStream();
-//		JAXB.marshal(data, stream);
-//		appJob.setJobData(stream.toString());
-//		appJob.setSubmittedTime(Calendar.getInstance().getTime());
-//		appJob.setStatus(ApplicationJobStatus.SUBMITTED);
-//		appJob.setStatusUpdateTime(appJob.getSubmittedTime());
-//		GFacUtils.recordApplicationJob(jobExecutionContext, appJob);
-//	}
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        try {
-        	List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-            String stdOutStr = GFacUtils.readFileToString(jobExecutionContext.getStandardOutput());
-            String stdErrStr = GFacUtils.readFileToString(jobExecutionContext.getStandardError());
-			Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-            OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
-            TaskDetails taskDetails = (TaskDetails) experimentCatalog.get(ExperimentCatalogModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
-            if (taskDetails != null){
-                taskDetails.setApplicationOutputs(outputArray);
-                experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, taskDetails, taskDetails.getTaskID());
-            }
-            experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-            TaskIdentifier taskIdentity = new TaskIdentifier(jobExecutionContext.getTaskData().getTaskID(),
-                    jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
-                    jobExecutionContext.getExperimentID(),
-                    jobExecutionContext.getGatewayID());
-            jobExecutionContext.getLocalEventPublisher().publish(new TaskOutputChangeEvent(outputArray, taskIdentity));
-        } catch (XmlException e) {
-            throw new GFacProviderException("Cannot read output:" + e.getMessage(), e);
-        } catch (IOException io) {
-            throw new GFacProviderException(io.getMessage(), io);
-        } catch (Exception e){
-        	throw new GFacProviderException("Error in retrieving results",e);
-        }
-    }
-
-    public boolean cancelJob(JobExecutionContext jobExecutionContext) throws GFacException {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        // TODO: Auto generated method body.
-    }
-
-    @Override
-    public void monitor(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        // TODO: Auto generated method body.
-    }
-
-
-    private void buildCommand() {
-        cmdList.add(jobExecutionContext.getExecutablePath());
-        Map<String, Object> inputParameters = jobExecutionContext.getInMessageContext().getParameters();
-
-        // sort the inputs first and then build the command List
-        Comparator<InputDataObjectType> inputOrderComparator = new Comparator<InputDataObjectType>() {
-            @Override
-            public int compare(InputDataObjectType inputDataObjectType, InputDataObjectType t1) {
-                return inputDataObjectType.getInputOrder() - t1.getInputOrder();
-            }
-        };
-        Set<InputDataObjectType> sortedInputSet = new TreeSet<InputDataObjectType>(inputOrderComparator);
-        for (Object object : inputParameters.values()) {
-            if (object instanceof InputDataObjectType) {
-                InputDataObjectType inputDOT = (InputDataObjectType) object;
-                sortedInputSet.add(inputDOT);
-            }
-        }
-        for (InputDataObjectType inputDataObjectType : sortedInputSet) {
-            if (inputDataObjectType.getApplicationArgument() != null
-                    && !inputDataObjectType.getApplicationArgument().equals("")) {
-                cmdList.add(inputDataObjectType.getApplicationArgument());
-            }
-
-            if (inputDataObjectType.getValue() != null
-                    && !inputDataObjectType.getValue().equals("")) {
-                cmdList.add(inputDataObjectType.getValue());
-            }
-        }
-
-    }
-
-    private void initProcessBuilder(ApplicationDeploymentDescription app){
-        builder = new ProcessBuilder(cmdList);
-
-        List<SetEnvPaths> setEnvironment = app.getSetEnvironment();
-        if (setEnvironment != null) {
-            for (SetEnvPaths envPath : setEnvironment) {
-                Map<String,String> builderEnv = builder.environment();
-                builderEnv.put(envPath.getName(), envPath.getValue());
-            }
-        }
-    }
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.local.provider.impl;
+//
+//import java.io.File;
+//import java.io.IOException;
+//import java.util.ArrayList;
+//import java.util.Comparator;
+//import java.util.List;
+//import java.util.Map;
+//import java.util.Set;
+//import java.util.TreeSet;
+//
+//import org.apache.airavata.gfac.core.GFacConstants;
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.provider.AbstractProvider;
+//import org.apache.airavata.gfac.core.provider.GFacProviderException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.impl.OutputUtils;
+//import org.apache.airavata.gfac.local.utils.InputStreamToFileWriter;
+//import org.apache.airavata.gfac.local.utils.InputUtils;
+//import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+//import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
+//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+//import org.apache.airavata.model.messaging.event.JobIdentifier;
+//import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
+//import org.apache.airavata.model.messaging.event.TaskIdentifier;
+//import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
+//import org.apache.airavata.model.experiment.JobDetails;
+//import org.apache.airavata.model.experiment.JobState;
+//import org.apache.airavata.model.experiment.TaskDetails;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
+//import org.apache.xmlbeans.XmlException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import sun.reflect.generics.reflectiveObjects.NotImplementedException;
+//
+//public class LocalProvider extends AbstractProvider {
+//    private static final Logger log = LoggerFactory.getLogger(LocalProvider.class);
+//    private ProcessBuilder builder;
+//    private List<String> cmdList;
+//    private String jobId;
+//
+//    public static class LocalProviderJobData{
+//    	private String applicationName;
+//    	private List<String> inputParameters;
+//    	private String workingDir;
+//    	private String inputDir;
+//    	private String outputDir;
+//		public String getApplicationName() {
+//			return applicationName;
+//		}
+//		public void setApplicationName(String applicationName) {
+//			this.applicationName = applicationName;
+//		}
+//		public List<String> getInputParameters() {
+//			return inputParameters;
+//		}
+//		public void setInputParameters(List<String> inputParameters) {
+//			this.inputParameters = inputParameters;
+//		}
+//		public String getWorkingDir() {
+//			return workingDir;
+//		}
+//		public void setWorkingDir(String workingDir) {
+//			this.workingDir = workingDir;
+//		}
+//		public String getInputDir() {
+//			return inputDir;
+//		}
+//		public void setInputDir(String inputDir) {
+//			this.inputDir = inputDir;
+//		}
+//		public String getOutputDir() {
+//			return outputDir;
+//		}
+//		public void setOutputDir(String outputDir) {
+//			this.outputDir = outputDir;
+//		}
+//    }
+//    public LocalProvider(){
+//        cmdList = new ArrayList<String>();
+//    }
+//
+//    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
+//    	super.initialize(jobExecutionContext);
+//
+//        // build command with all inputs
+//        buildCommand();
+//        initProcessBuilder(jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription());
+//
+//        // extra environment variables
+//        builder.environment().put(GFacConstants.INPUT_DATA_DIR_VAR_NAME, jobExecutionContext.getInputDir());
+//        builder.environment().put(GFacConstants.OUTPUT_DATA_DIR_VAR_NAME, jobExecutionContext.getOutputDir());
+//
+//        // set working directory
+//        builder.directory(new File(jobExecutionContext.getWorkingDir()));
+//
+//        // log info
+//        log.info("Command = " + InputUtils.buildCommand(cmdList));
+//        log.info("Working dir = " + builder.directory());
+//        /*for (String key : builder.environment().keySet()) {
+//            log.info("Env[" + key + "] = " + builder.environment().get(key));
+//        }*/
+//    }
+//
+//    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+//        JobDetails jobDetails = new JobDetails();
+//        try {
+//        	jobId = jobExecutionContext.getTaskData().getTaskID();
+//            jobDetails.setJobID(jobId);
+//            jobDetails.setJobDescription(jobExecutionContext.getApplicationContext()
+//                    .getApplicationDeploymentDescription().getAppDeploymentDescription());
+//            jobExecutionContext.setJobDetails(jobDetails);
+//            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SETUP);
+//            // running cmd
+//            Process process = builder.start();
+//
+//            Thread standardOutWriter = new InputStreamToFileWriter(process.getInputStream(), jobExecutionContext.getStandardOutput());
+//            Thread standardErrorWriter = new InputStreamToFileWriter(process.getErrorStream(), jobExecutionContext.getStandardError());
+//
+//            // start output threads
+//            standardOutWriter.setDaemon(true);
+//            standardErrorWriter.setDaemon(true);
+//            standardOutWriter.start();
+//            standardErrorWriter.start();
+//
+//            int returnValue = process.waitFor();
+//
+//            // make sure other two threads are done
+//            standardOutWriter.join();
+//            standardErrorWriter.join();
+//
+//            /*
+//             * Check the return value. Exit codes are rarely a reliable indicator of
+//             * success or failure, so we only log a warning instead of failing the job.
+//             */
+//            if (returnValue != 0) {
+//                log.error("Process finished with non zero return value. Process may have failed");
+//            } else {
+//                log.info("Process finished with return value of zero.");
+//            }
+//
+//            StringBuffer buf = new StringBuffer();
+//            buf.append("Executed ").append(InputUtils.buildCommand(cmdList))
+//                    .append(" on the localHost, working directory = ").append(jobExecutionContext.getWorkingDir())
+//                    .append(" tempDirectory = ").append(jobExecutionContext.getScratchLocation()).append(" With the status ")
+//                    .append(String.valueOf(returnValue));
+//
+//            log.info(buf.toString());
+//
+//            // updating the job status to complete because there's nothing to monitor in local jobs
+////            MonitorID monitorID = createMonitorID(jobExecutionContext);
+//            JobIdentifier jobIdentity = new JobIdentifier(jobExecutionContext.getJobDetails().getJobID(),
+//                    jobExecutionContext.getTaskData().getTaskID(),
+//                    jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
+//                    jobExecutionContext.getExperimentID(),
+//                    jobExecutionContext.getGatewayID());
+//            jobExecutionContext.getLocalEventPublisher().publish(new JobStatusChangeEvent(JobState.COMPLETE, jobIdentity));
+//        } catch (IOException io) {
+//            throw new GFacProviderException(io.getMessage(), io);
+//        } catch (InterruptedException e) {
+//            throw new GFacProviderException(e.getMessage(), e);
+//        } catch (GFacException e) {
+//            throw new GFacProviderException(e.getMessage(), e);
+//        }
+//    }
+//
+////	private MonitorID createMonitorID(JobExecutionContext jobExecutionContext) {
+////		MonitorID monitorID = new MonitorID(jobExecutionContext.getApplicationContext().getHostDescription(), jobId,
+////		        jobExecutionContext.getTaskData().getTaskID(),
+////		        jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(), jobExecutionContext.getExperimentID(),
+////		        jobExecutionContext.getExperiment().getUserName(),jobId);
+////		return monitorID;
+////	}
+//
+////	private void saveApplicationJob(JobExecutionContext jobExecutionContext)
+////			throws GFacProviderException {
+////		ApplicationDeploymentDescriptionType app = jobExecutionContext.
+////                getApplicationContext().getApplicationDeploymentDescription().getType();
+////		ApplicationJob appJob = GFacUtils.createApplicationJob(jobExecutionContext);
+////		appJob.setJobId(jobId);
+////		LocalProviderJobData data = new LocalProviderJobData();
+////		data.setApplicationName(app.getExecutableLocation());
+////		data.setInputDir(app.getInputDataDirectory());
+////		data.setOutputDir(app.getOutputDataDirectory());
+////		data.setWorkingDir(builder.directory().toString());
+////		data.setInputParameters(ProviderUtils.getInputParameters(jobExecutionContext));
+////		ByteArrayOutputStream stream = new ByteArrayOutputStream();
+////		JAXB.marshal(data, stream);
+////		appJob.setJobData(stream.toString());
+////		appJob.setSubmittedTime(Calendar.getInstance().getTime());
+////		appJob.setStatus(ApplicationJobStatus.SUBMITTED);
+////		appJob.setStatusUpdateTime(appJob.getSubmittedTime());
+////		GFacUtils.recordApplicationJob(jobExecutionContext, appJob);
+////	}
+//
+//    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+//        try {
+//        	List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
+//            String stdOutStr = GFacUtils.readFileToString(jobExecutionContext.getStandardOutput());
+//            String stdErrStr = GFacUtils.readFileToString(jobExecutionContext.getStandardError());
+//			Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
+//            OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
+//            TaskDetails taskDetails = (TaskDetails) experimentCatalog.get(ExperimentCatalogModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
+//            if (taskDetails != null){
+//                taskDetails.setApplicationOutputs(outputArray);
+//                experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, taskDetails, taskDetails.getTaskID());
+//            }
+//            experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
+//            TaskIdentifier taskIdentity = new TaskIdentifier(jobExecutionContext.getTaskData().getTaskID(),
+//                    jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
+//                    jobExecutionContext.getExperimentID(),
+//                    jobExecutionContext.getGatewayID());
+//            jobExecutionContext.getLocalEventPublisher().publish(new TaskOutputChangeEvent(outputArray, taskIdentity));
+//        } catch (XmlException e) {
+//            throw new GFacProviderException("Cannot read output:" + e.getMessage(), e);
+//        } catch (IOException io) {
+//            throw new GFacProviderException(io.getMessage(), io);
+//        } catch (Exception e) {
+//            throw new GFacProviderException("Error in retrieving results", e);
+//        }
+//    }
+//
+//    public boolean cancelJob(JobExecutionContext jobExecutionContext) throws GFacException {
+//        throw new NotImplementedException();
+//    }
+//
+//    @Override
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
+//        // TODO: Auto-generated method body.
+//    }
+//
+//    @Override
+//    public void monitor(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
+//        // TODO: Auto-generated method body.
+//    }
+//
+//
+//    private void buildCommand() {
+//        cmdList.add(jobExecutionContext.getExecutablePath());
+//        Map<String, Object> inputParameters = jobExecutionContext.getInMessageContext().getParameters();
+//
+//        // sort the inputs first and then build the command List
+//        Comparator<InputDataObjectType> inputOrderComparator = new Comparator<InputDataObjectType>() {
+//            @Override
+//            public int compare(InputDataObjectType inputDataObjectType, InputDataObjectType t1) {
+//                return inputDataObjectType.getInputOrder() - t1.getInputOrder();
+//            }
+//        };
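+//        // Caution: a TreeSet keyed on inputOrder treats two inputs with equal
+//        // order values as duplicates and silently keeps only one of them.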
+//        Set<InputDataObjectType> sortedInputSet = new TreeSet<InputDataObjectType>(inputOrderComparator);
+//        for (Object object : inputParameters.values()) {
+//            if (object instanceof InputDataObjectType) {
+//                InputDataObjectType inputDOT = (InputDataObjectType) object;
+//                sortedInputSet.add(inputDOT);
+//            }
+//        }
+//        for (InputDataObjectType inputDataObjectType : sortedInputSet) {
+//            if (inputDataObjectType.getApplicationArgument() != null
+//                    && !inputDataObjectType.getApplicationArgument().equals("")) {
+//                cmdList.add(inputDataObjectType.getApplicationArgument());
+//            }
+//
+//            if (inputDataObjectType.getValue() != null
+//                    && !inputDataObjectType.getValue().equals("")) {
+//                cmdList.add(inputDataObjectType.getValue());
+//            }
+//        }
+//
+//    }
+//
+//    private void initProcessBuilder(ApplicationDeploymentDescription app){
+//        builder = new ProcessBuilder(cmdList);
+//
+//        List<SetEnvPaths> setEnvironment = app.getSetEnvironment();
+//        if (setEnvironment != null) {
+//            for (SetEnvPaths envPath : setEnvironment) {
+//                Map<String,String> builderEnv = builder.environment();
+//                builderEnv.put(envPath.getName(), envPath.getValue());
+//            }
+//        }
+//    }
+//
+//    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
+//
+//    }
+//}

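For reference, the execute() flow in the commented-out LocalProvider above boils
down to a standard pattern: start the process, drain stdout and stderr on daemon
threads, and wait for both the process and the writer threads before reading the
output files. The sketch below is a minimal, self-contained illustration of that
pattern; InputStreamToFileWriter is Airavata-internal, so a plain reader/writer
loop stands in for it, and the command and output file names are hypothetical.

    import java.io.*;

    public class LocalExecSketch {

        // Stands in for Airavata's InputStreamToFileWriter (assumed to copy a
        // process stream to a file, line by line).
        static Thread drain(InputStream in, String path) {
            Thread t = new Thread(() -> {
                try (BufferedReader reader = new BufferedReader(new InputStreamReader(in));
                     PrintWriter writer = new PrintWriter(new FileWriter(path))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        writer.println(line);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            });
            t.setDaemon(true); // writer threads must not keep the JVM alive
            t.start();
            return t;
        }

        public static void main(String[] args) throws Exception {
            ProcessBuilder builder = new ProcessBuilder("echo", "hello"); // hypothetical command
            builder.directory(new File(System.getProperty("java.io.tmpdir")));
            Process process = builder.start();
            Thread out = drain(process.getInputStream(), "stdout.txt"); // hypothetical paths
            Thread err = drain(process.getErrorStream(), "stderr.txt");
            int returnValue = process.waitFor();
            out.join(); // ensure both files are fully written before using them
            err.join();
            System.out.println("exit status = " + returnValue);
        }
    }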
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
index 2b45df7..8d7cd8d 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
@@ -1,51 +1,51 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.local.utils;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-
-public class LocalProviderUtil {
-    private static final Logger log = LoggerFactory.getLogger(LocalProviderUtil.class);
-
-    private void makeFileSystemDir(String dir) throws GFacProviderException {
-        File f = new File(dir);
-        if (f.isDirectory() && f.exists()) {
-            return;
-        } else if (!new File(dir).mkdir()) {
-            throw new GFacProviderException("Cannot make directory " + dir);
-        }
-    }
-
-    public void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        log.info("working diectroy = " + jobExecutionContext.getWorkingDir());
-        log.info("temp directory = " + jobExecutionContext.getScratchLocation());
-        makeFileSystemDir(jobExecutionContext.getWorkingDir());
-        makeFileSystemDir(jobExecutionContext.getScratchLocation());
-        makeFileSystemDir(jobExecutionContext.getInputDir());
-        makeFileSystemDir(jobExecutionContext.getOutputDir());
-    }
-
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.local.utils;
+//
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.provider.GFacProviderException;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//
+//public class LocalProviderUtil {
+//    private static final Logger log = LoggerFactory.getLogger(LocalProviderUtil.class);
+//
+//    private void makeFileSystemDir(String dir) throws GFacProviderException {
+//        File f = new File(dir);
+//        if (f.isDirectory() && f.exists()) {
+//            return;
+//        } else if (!new File(dir).mkdir()) {
+//            throw new GFacProviderException("Cannot make directory " + dir);
+//        }
+//    }
+//
+//    public void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+//        log.info("working diectroy = " + jobExecutionContext.getWorkingDir());
+//        log.info("temp directory = " + jobExecutionContext.getScratchLocation());
+//        makeFileSystemDir(jobExecutionContext.getWorkingDir());
+//        makeFileSystemDir(jobExecutionContext.getScratchLocation());
+//        makeFileSystemDir(jobExecutionContext.getInputDir());
+//        makeFileSystemDir(jobExecutionContext.getOutputDir());
+//    }
+//
+//}

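One detail in the makeFileSystemDir logic above is worth flagging: the f.exists()
check is redundant (File.isDirectory() already implies existence), and File.mkdir()
creates only the final path element, failing when intermediate directories are
missing, although the working, scratch, input and output paths being prepared are
typically nested. A minimal sketch of the same idempotent check using mkdirs(),
with a hypothetical directory layout:

    import java.io.File;
    import java.io.IOException;

    public class MakeDirSketch {

        // Idempotent directory creation: succeed if the directory already
        // exists, otherwise create the full parent chain.
        static void makeFileSystemDir(String dir) throws IOException {
            File f = new File(dir);
            // the second isDirectory() guards against a concurrent creator
            if (!f.isDirectory() && !f.mkdirs() && !f.isDirectory()) {
                throw new IOException("Cannot make directory " + dir);
            }
        }

        public static void main(String[] args) throws IOException {
            String base = System.getProperty("java.io.tmpdir") + File.separator + "gfac-demo";
            makeFileSystemDir(base + File.separator + "input");
            makeFileSystemDir(base + File.separator + "output");
        }
    }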
http://git-wip-us.apache.org/repos/asf/airavata/blob/df3fbe6a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
index 68fb39c..587bf46 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
@@ -1,229 +1,229 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.ssh.handler;
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.experiment.*;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.*;
-
-/**
- * This handler will copy input data from gateway machine to airavata
- * installed machine, later running handlers can copy the input files to computing resource
- * <Handler class="AdvancedSCPOutputHandler">
- * <property name="privateKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa"/>
- * <property name="publicKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa.pub"/>
- * <property name="userName" value="airavata"/>
- * <property name="hostName" value="gw98.iu.xsede.org"/>
- * <property name="inputPath" value="/home/airavata/outputData"/>
- */
-public class AdvancedSCPInputHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(AdvancedSCPInputHandler.class);
-    public static final String ADVANCED_SSH_AUTH = "advanced.ssh.auth";
-    public static final int DEFAULT_SSH_PORT = 22;
-
-    private String password = null;
-
-    private String publicKeyPath;
-
-    private String passPhrase;
-
-    private String privateKeyPath;
-
-    private String userName;
-
-    private String hostName;
-
-    private String inputPath;
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-        password = (String) properties.get("password");
-        passPhrase = (String) properties.get("passPhrase");
-        privateKeyPath = (String) properties.get("privateKeyPath");
-        publicKeyPath = (String) properties.get("publicKeyPath");
-        userName = (String) properties.get("userName");
-        hostName = (String) properties.get("hostName");
-        inputPath = (String) properties.get("inputPath");
-    }
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        super.invoke(jobExecutionContext);
-        int index = 0;
-        int oldIndex = 0;
-        List<String> oldFiles = new ArrayList<String>();
-        MessageContext inputNew = new MessageContext();
-        StringBuffer data = new StringBuffer("|");
-        RemoteCluster remoteCluster = null;
-
-        try {
-            String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
-            if (pluginData != null) {
-                try {
-                    oldIndex = Integer.parseInt(pluginData.split("\\|")[0].trim());
-                    oldFiles = Arrays.asList(pluginData.split("\\|")[1].split(","));
-                    if (oldIndex == oldFiles.size()) {
-                        log.info("Old data looks good !!!!");
-                    } else {
-                        oldIndex = 0;
-                        oldFiles.clear();
-                    }
-                } catch (NumberFormatException e) {
-                    log.error("Previously stored data " + pluginData + " is wrong so we continue the operations");
-                }
-            }
-
-            AuthenticationInfo authenticationInfo = null;
-            if (password != null) {
-                authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
-            } else {
-                authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
-                        this.passPhrase);
-            }
-
-            // Server info
-            String parentPath = inputPath + File.separator + jobExecutionContext.getExperimentID() + File.separator + jobExecutionContext.getTaskData().getTaskID();
-            if (index < oldIndex) {
-                parentPath = oldFiles.get(index);
-                data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
-            } else {
-                (new File(parentPath)).mkdirs();
-                StringBuffer temp = new StringBuffer(data.append(parentPath).append(",").toString());
-                GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-            }
-            DataTransferDetails detail = new DataTransferDetails();
-            TransferStatus status = new TransferStatus();
-            // here doesn't matter what the job manager is because we are only doing some file handling
-            // not really dealing with monitoring or job submission, so we pa
-
-            MessageContext input = jobExecutionContext.getInMessageContext();
-            Set<String> parameters = input.getParameters().keySet();
-            for (String paramName : parameters) {
-                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
-                String paramValue = inputParamType.getValue();
-                // TODO: Review this with type
-                if (inputParamType.getType() == DataType.URI) {
-                    try {
-                        URL file = new URL(paramValue);
-                        String key = file.getUserInfo() + file.getHost() + DEFAULT_SSH_PORT;
-                        GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, file.getUserInfo(), file.getHost(), DEFAULT_SSH_PORT);
-                        remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
-                        paramValue = file.getPath();
-                    } catch (MalformedURLException e) {
-                        String key = this.userName + this.hostName + DEFAULT_SSH_PORT;
-                        GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
-                        remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
-                        log.error(e.getLocalizedMessage(), e);
-                    }
-
-                    if (index < oldIndex) {
-                        log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-                        inputParamType.setValue(oldFiles.get(index));
-                        data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
-                    } else {
-                        String stageInputFile = stageInputFiles(remoteCluster, paramValue, parentPath);
-                        inputParamType.setValue(stageInputFile);
-                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
-                        status.setTransferState(TransferState.UPLOAD);
-                        detail.setTransferStatus(status);
-                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
-                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                    }
-                }
-                // FIXME: what is the thrift model DataType equivalent for URIArray type?
-//                else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-//                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
-//                    List<String> newFiles = new ArrayList<String>();
-//                    for (String paramValueEach : split) {
-//                        try {
-//                            URL file = new URL(paramValue);
-//                            this.userName = file.getUserInfo();
-//                            this.hostName = file.getHost();
-//                            paramValueEach = file.getPath();
-//                        } catch (MalformedURLException e) {
-//                            log.error(e.getLocalizedMessage(), e);
-//                        }
-//                        if (index < oldIndex) {
-//                            log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-//                            newFiles.add(oldFiles.get(index));
-//                            data.append(oldFiles.get(index++)).append(",");
-//                        } else {
-//                            String stageInputFiles = stageInputFiles(remoteCluster, paramValueEach, parentPath);
-//                            StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
-//                            GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-//                            newFiles.add(stageInputFiles);
-//                        }
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.gfac.ssh.handler;
+//
+//import org.apache.airavata.gfac.core.GFacException;
+//import org.apache.airavata.gfac.core.SSHApiException;
+//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.handler.AbstractHandler;
+//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+//import org.apache.airavata.gfac.core.GFacUtils;
+//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
+//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
+//import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
+//import org.apache.airavata.model.appcatalog.appinterface.DataType;
+//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+//import org.apache.airavata.model.experiment.*;
+//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.File;
+//import java.io.PrintWriter;
+//import java.io.StringWriter;
+//import java.net.MalformedURLException;
+//import java.net.URL;
+//import java.util.*;
+//
+///**
+// * This handler copies input data from the gateway machine to the machine where
+// * Airavata is installed; handlers that run later can copy the input files on to
+// * the computing resource.
+// * <Handler class="AdvancedSCPInputHandler">
+// * <property name="privateKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa"/>
+// * <property name="publicKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa.pub"/>
+// * <property name="userName" value="airavata"/>
+// * <property name="hostName" value="gw98.iu.xsede.org"/>
+// * <property name="inputPath" value="/home/airavata/outputData"/>
+// */
+//public class AdvancedSCPInputHandler extends AbstractHandler {
+//    private static final Logger log = LoggerFactory.getLogger(AdvancedSCPInputHandler.class);
+//    public static final String ADVANCED_SSH_AUTH = "advanced.ssh.auth";
+//    public static final int DEFAULT_SSH_PORT = 22;
+//
+//    private String password = null;
+//
+//    private String publicKeyPath;
+//
+//    private String passPhrase;
+//
+//    private String privateKeyPath;
+//
+//    private String userName;
+//
+//    private String hostName;
+//
+//    private String inputPath;
+//
+//    public void initProperties(Properties properties) throws GFacHandlerException {
+//        password = (String) properties.get("password");
+//        passPhrase = (String) properties.get("passPhrase");
+//        privateKeyPath = (String) properties.get("privateKeyPath");
+//        publicKeyPath = (String) properties.get("publicKeyPath");
+//        userName = (String) properties.get("userName");
+//        hostName = (String) properties.get("hostName");
+//        inputPath = (String) properties.get("inputPath");
+//    }
+//
+//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        super.invoke(jobExecutionContext);
+//        int index = 0;
+//        int oldIndex = 0;
+//        List<String> oldFiles = new ArrayList<String>();
+//        MessageContext inputNew = new MessageContext();
+//        StringBuffer data = new StringBuffer("|");
+//        RemoteCluster remoteCluster = null;
+//
+//        try {
+//            String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
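+//            // pluginData has the form "<count>|<path1>,<path2>,..."; it records the
+//            // files already staged so a retried invocation can resume where it stopped.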
+//            if (pluginData != null) {
+//                try {
+//                    oldIndex = Integer.parseInt(pluginData.split("\\|")[0].trim());
+//                    oldFiles = Arrays.asList(pluginData.split("\\|")[1].split(","));
+//                    if (oldIndex == oldFiles.size()) {
+//                        log.info("Old data looks good !!!!");
+//                    } else {
+//                        oldIndex = 0;
+//                        oldFiles.clear();
 //                    }
-//                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+//                } catch (NumberFormatException e) {
+//                    log.error("Previously stored data " + pluginData + " is wrong so we continue the operations");
 //                }
-                inputNew.getParameters().put(paramName, inputParamType);
-            }
-        } catch (Exception e) {
-            log.error(e.getMessage());
-            try {
-                StringWriter errors = new StringWriter();
-                e.printStackTrace(new PrintWriter(errors));
-                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-            } catch (GFacException e1) {
-                log.error(e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-        }
-        jobExecutionContext.setInMessageContext(inputNew);
-    }
-
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        this.invoke(jobExecutionContext);
-    }
-
-    private String stageInputFiles(RemoteCluster remoteCluster, String paramValue, String parentPath) throws GFacException {
-        try {
-            remoteCluster.scpFrom(paramValue, parentPath);
-            return "file://" + parentPath + File.separator + (new File(paramValue)).getName();
-        } catch (SSHApiException e) {
-            log.error("Error tranfering remote file to local file, remote path: " + paramValue);
-            throw new GFacException(e);
-        }
-    }
-}
+//            }
+//
+//            AuthenticationInfo authenticationInfo = null;
+//            if (password != null) {
+//                authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
+//            } else {
+//                authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
+//                        this.passPhrase);
+//            }
+//
+//            // Server info
+//            String parentPath = inputPath + File.separator + jobExecutionContext.getExperimentID() + File.separator + jobExecutionContext.getTaskData().getTaskID();
+//            if (index < oldIndex) {
+//                parentPath = oldFiles.get(index);
+//                data.append(oldFiles.get(index++)).append(","); // reuse the already transferred path and advance the index
+//            } else {
+//                (new File(parentPath)).mkdirs();
+//                StringBuffer temp = new StringBuffer(data.append(parentPath).append(",").toString());
+//                GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
+//            }
+//            DataTransferDetails detail = new DataTransferDetails();
+//            TransferStatus status = new TransferStatus();
+//            // The job manager type does not matter here: this handler only stages files
+//            // and is not involved in monitoring or job submission.
+//
+//            MessageContext input = jobExecutionContext.getInMessageContext();
+//            Set<String> parameters = input.getParameters().keySet();
+//            for (String paramName : parameters) {
+//                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
+//                String paramValue = inputParamType.getValue();
+//                // TODO: Review this with type
+//                if (inputParamType.getType() == DataType.URI) {
+//                    try {
+//                        URL file = new URL(paramValue);
+//                        String key = file.getUserInfo() + file.getHost() + DEFAULT_SSH_PORT;
+//                        GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, file.getUserInfo(), file.getHost(), DEFAULT_SSH_PORT);
+//                        remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
+//                        paramValue = file.getPath();
+//                    } catch (MalformedURLException e) {
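+//                        // Not a URL: fall back to the user/host configured in the handler properties.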
+//                        String key = this.userName + this.hostName + DEFAULT_SSH_PORT;
+//                        GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
+//                        remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
+//                        log.error(e.getLocalizedMessage(), e);
+//                    }
+//
+//                    if (index < oldIndex) {
+//                        log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
+//                        inputParamType.setValue(oldFiles.get(index));
+//                        data.append(oldFiles.get(index++)).append(","); // reuse the already transferred path and advance the index
+//                    } else {
+//                        String stageInputFile = stageInputFiles(remoteCluster, paramValue, parentPath);
+//                        inputParamType.setValue(stageInputFile);
+//                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
+//                        status.setTransferState(TransferState.UPLOAD);
+//                        detail.setTransferStatus(status);
+//                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
+//                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//
+//                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
+//                    }
+//                }
+//                // FIXME: what is the thrift model DataType equivalent for URIArray type?
+////                else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
+////                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
+////                    List<String> newFiles = new ArrayList<String>();
+////                    for (String paramValueEach : split) {
+////                        try {
+////                            URL file = new URL(paramValue);
+////                            this.userName = file.getUserInfo();
+////                            this.hostName = file.getHost();
+////                            paramValueEach = file.getPath();
+////                        } catch (MalformedURLException e) {
+////                            log.error(e.getLocalizedMessage(), e);
+////                        }
+////                        if (index < oldIndex) {
+////                            log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
+////                            newFiles.add(oldFiles.get(index));
+////                            data.append(oldFiles.get(index++)).append(",");
+////                        } else {
+////                            String stageInputFiles = stageInputFiles(remoteCluster, paramValueEach, parentPath);
+////                            StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
+////                            GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
+////                            newFiles.add(stageInputFiles);
+////                        }
+////                    }
+////                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+////                }
+//                inputNew.getParameters().put(paramName, inputParamType);
+//            }
+//        } catch (Exception e) {
+//            log.error(e.getMessage());
+//            try {
+//                StringWriter errors = new StringWriter();
+//                e.printStackTrace(new PrintWriter(errors));
+//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+//            } catch (GFacException e1) {
+//                log.error(e1.getLocalizedMessage());
+//            }
+//            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
+//        }
+//        jobExecutionContext.setInMessageContext(inputNew);
+//    }
+//
+//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+//        this.invoke(jobExecutionContext);
+//    }
+//
+//    private String stageInputFiles(RemoteCluster remoteCluster, String paramValue, String parentPath) throws GFacException {
+//        try {
+//            remoteCluster.scpFrom(paramValue, parentPath);
+//            return "file://" + parentPath + File.separator + (new File(paramValue)).getName();
+//        } catch (SSHApiException e) {
+//            log.error("Error tranfering remote file to local file, remote path: " + paramValue);
+//            throw new GFacException(e);
+//        }
+//    }
+//}
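
The resume bookkeeping in AdvancedSCPInputHandler above persists, after each staged
file, a record of the form "<count>|<path1>,<path2>,..." through
GFacUtils.saveHandlerData, and a retried invocation skips every file whose index
falls below the stored count. A minimal sketch of encoding and validating that
record format (the class and method names are illustrative, not Airavata APIs):

    import java.util.*;

    public class StagingCheckpointSketch {

        // Builds "<count>|<path1>,<path2>,..." with a trailing comma,
        // matching the handler's StringBuffer bookkeeping.
        static String encode(List<String> stagedPaths) {
            StringBuilder sb = new StringBuilder();
            sb.append(stagedPaths.size()).append("|");
            for (String p : stagedPaths) {
                sb.append(p).append(",");
            }
            return sb.toString();
        }

        // Returns the staged paths, or an empty list when the record is
        // inconsistent (count does not match the file list), mirroring the
        // handler's decision to restart the transfer from scratch.
        static List<String> parse(String record) {
            try {
                String[] parts = record.split("\\|");
                int count = Integer.parseInt(parts[0].trim());
                List<String> files = Arrays.asList(parts[1].split(","));
                return count == files.size() ? files : Collections.emptyList();
            } catch (RuntimeException e) {
                return Collections.emptyList();
            }
        }

        public static void main(String[] args) {
            String record = encode(Arrays.asList("/tmp/a.dat", "/tmp/b.dat"));
            System.out.println(record);        // 2|/tmp/a.dat,/tmp/b.dat,
            System.out.println(parse(record)); // [/tmp/a.dat, /tmp/b.dat]
        }
    }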