Posted to commits@airavata.apache.org by sh...@apache.org on 2015/11/09 16:59:39 UTC

[1/2] airavata git commit: Deleted unused classes

Repository: airavata
Updated Branches:
  refs/heads/master 140d9bd50 -> 08cdad264


http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
deleted file mode 100644
index bd8a0bc..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
+++ /dev/null
@@ -1,473 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-// */
-//
-//package org.apache.airavata.gfac.ssh.provider.impl;
-//
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.model.experiment.TaskState;
-//import org.apache.airavata.registry.cpi.AppCatalogException;
-//import org.apache.airavata.common.exception.AiravataException;
-//import org.apache.airavata.common.exception.ApplicationSettingsException;
-//import org.apache.airavata.common.utils.LocalEventPublisher;
-//import org.apache.airavata.gfac.core.GFacConstants;
-//import org.apache.airavata.gfac.core.GFacException;
-//import org.apache.airavata.gfac.core.JobDescriptor;
-//import org.apache.airavata.gfac.core.SSHApiException;
-//import org.apache.airavata.gfac.core.cluster.JobStatus;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.context.MessageContext;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.monitor.MonitorID;
-//import org.apache.airavata.gfac.core.monitor.state.GfacExperimentStateChangeRequest;
-//import org.apache.airavata.gfac.core.provider.AbstractProvider;
-//import org.apache.airavata.gfac.core.provider.GFacProviderException;
-//import org.apache.airavata.gfac.core.states.GfacExperimentState;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.gsi.ssh.api.CommandExecutor;
-//import org.apache.airavata.gfac.impl.StandardOutReader;
-//import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
-//import org.apache.airavata.gfac.monitor.email.EmailMonitorFactory;
-//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-//import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
-//import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
-//import org.apache.airavata.model.appcatalog.appinterface.DataType;
-//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-//import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-//import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
-//import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
-//import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
-//import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-//import org.apache.airavata.model.experiment.CorrectiveAction;
-//import org.apache.airavata.model.experiment.ErrorCategory;
-//import org.apache.airavata.model.experiment.JobDetails;
-//import org.apache.airavata.model.experiment.JobState;
-//import org.apache.xmlbeans.XmlException;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//import sun.reflect.generics.reflectiveObjects.NotImplementedException;
-//
-//import java.io.*;
-//import java.util.*;
-//
-///**
-// * Execute application using remote SSH
-// */
-//public class SSHProvider extends AbstractProvider {
-//    private static final Logger log = LoggerFactory.getLogger(SSHProvider.class);
-//    private RemoteCluster remoteCluster;
-//    private String jobID = null;
-//    private String taskID = null;
-//    // we keep gsisshprovider to support qsub submission in case of an HPC scenario with SSH
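-//    // hpcType == true means the job goes through the resource job manager (e.g. qsub); false means the generated script is run directly over SSH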
-//    private boolean hpcType = false;
-//
-//    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-//        try {
-//            super.initialize(jobExecutionContext);
-//            String hostAddress = jobExecutionContext.getHostName();
-//            ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
-//            ResourceJobManagerType resourceJobManagerType = resourceJobManager.getResourceJobManagerType();
-//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-//                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-//            }
-//            taskID = jobExecutionContext.getTaskData().getTaskID();
-//
-//            JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
-//            if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH && resourceJobManagerType == ResourceJobManagerType.FORK) {
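-//                // forked (non-scheduler) SSH execution gets no scheduler-issued id, so a synthetic one is built from the host name and a timestamp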
-//                jobID = "SSH_" + jobExecutionContext.getHostName() + "_" + Calendar.getInstance().getTimeInMillis();
-//                remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-//
-//                String remoteFile = jobExecutionContext.getWorkingDir() + File.separatorChar + GFacConstants.EXECUTABLE_NAME;
-//                details.setJobID(taskID);
-//                details.setJobDescription(remoteFile);
-//                jobExecutionContext.setJobDetails(details);
-//                // FIXME: why is remoteCluster passed as null?
-//                JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
-//                details.setJobDescription(jobDescriptor.toXML());
-//
-//                GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SETUP);
-//                log.info(remoteFile);
-//                File runscript = createShellScript(jobExecutionContext);
-//                remoteCluster.scpTo(remoteFile, runscript.getAbsolutePath());
-//            } else {
-//                hpcType = true;
-//            }
-//        } catch (ApplicationSettingsException e) {
-//            log.error(e.getMessage());
-//            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-//        } catch (Exception e) {
-//            throw new GFacProviderException(e.getLocalizedMessage(), e);
-//        }
-//    }
-//
-//
-//    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-//        if (!hpcType) {
-//            try {
-//                /*
-//                 * Execute
-//                 */
-//                String executable = jobExecutionContext.getWorkingDir() + File.separatorChar + GFacConstants.EXECUTABLE_NAME;
-//                details.setJobDescription(executable);
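-//                // chmod 755 makes the staged script executable; the same remote invocation then runs it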
-//                RawCommandInfo rawCommandInfo = new RawCommandInfo("/bin/chmod 755 " + executable + "; " + executable);
-//                StandardOutReader jobIDReaderCommandOutput = new StandardOutReader();
-//                log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-//                CommandExecutor.executeCommand(rawCommandInfo, remoteCluster.getSession(), jobIDReaderCommandOutput);
-//                String stdOutputString = getOutputifAvailable(jobIDReaderCommandOutput, "Error submitting job to resource");
-//                log.info("stdout=" + stdOutputString);
-//            } catch (Exception e) {
-//                throw new GFacProviderException(e.getMessage(), e);
-//            }
-//        } else {
-//            try {
-//                StringBuffer data = new StringBuffer();
-//                JobDetails jobDetails = new JobDetails();
-//                String hostAddress = jobExecutionContext.getHostName();
-//                LocalEventPublisher localEventPublisher = jobExecutionContext.getLocalEventPublisher();
-//                try {
-//                    RemoteCluster remoteCluster = null;
-//                    if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-//                        GFACSSHUtils.addSecurityContext(jobExecutionContext);
-//                    }
-//                    remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-//                    if (remoteCluster == null) {
-//                        throw new GFacProviderException("Security context is not set properly");
-//                    } else {
-//                        log.info("Successfully retrieved the Security Context");
-//                    }
-//                    // This installed path is a mandatory field, because it can change based on the computing resource
-//                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
-//                    jobDetails.setJobName(jobDescriptor.getJobName());
-//                    log.info(jobDescriptor.toXML());
-//                    jobDetails.setJobDescription(jobDescriptor.toXML());
-//                    String jobID = remoteCluster.submitBatchJob(jobDescriptor);
-//                    if (jobID != null && !jobID.isEmpty()) {
-//                        jobDetails.setJobID(jobID);
-//                        GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SUBMITTED);
-//                        localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
-//                                , GfacExperimentState.JOBSUBMITTED));
-//                        jobExecutionContext.setJobDetails(jobDetails);
-//                        if (verifyJobSubmissionByJobId(remoteCluster, jobID)) {
-//                            localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
-//                                    , GfacExperimentState.JOBSUBMITTED));
-//                            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.QUEUED);
-//                        }
-//                    } else {
-//                        jobExecutionContext.setJobDetails(jobDetails);
-//                        int verificationTryCount = 0;
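-//                        // retry verification up to three times with a linearly growing pause (1s, 2s, 3s)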
-//                        while (verificationTryCount++ < 3) {
-//                            String verifyJobId = verifyJobSubmission(remoteCluster, jobDetails);
-//                            if (verifyJobId != null && !verifyJobId.isEmpty()) {
-//                                // JobStatus either changed from SUBMITTED to QUEUED or directly to QUEUED
-//                                jobID = verifyJobId;
-//                                jobDetails.setJobID(jobID);
-//                                localEventPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
-//                                        , GfacExperimentState.JOBSUBMITTED));
-//                                GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.QUEUED);
-//                                break;
-//                            }
-//                            Thread.sleep(verificationTryCount * 1000);
-//                        }
-//                    }
-//
-//                    if (jobID == null || jobID.isEmpty()) {
-//                        String msg = "expId:" + jobExecutionContext.getExperimentID() + " Couldn't find remote jobId for JobName:"
-//                                + jobDetails.getJobName() + "; neither the submit nor the verify step returned a valid JobId. Hence changing experiment state to Failed";
-//                        log.error(msg);
-//                        GFacUtils.saveErrorDetails(jobExecutionContext, msg, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//                        GFacUtils.publishTaskStatus(jobExecutionContext, localEventPublisher, TaskState.FAILED);
-//                        return;
-//                    }
-//                    data.append("jobDesc=").append(jobDescriptor.toXML());
-//                    data.append(",jobId=").append(jobDetails.getJobID());
-//                    monitor(jobExecutionContext);
-//                } catch (SSHApiException e) {
-//                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-//                    log.error(error);
-//                    jobDetails.setJobID("none");
-//                    GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-//                    GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//                    throw new GFacProviderException(error, e);
-//                } catch (Exception e) {
-//                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-//                    log.error(error);
-//                    jobDetails.setJobID("none");
-//                    GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-//                    GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//                    throw new GFacProviderException(error, e);
-//                } finally {
-//                    log.info("Saving data for future recovery: ");
-//                    log.info(data.toString());
-//                    GFacUtils.saveHandlerData(jobExecutionContext, data, this.getClass().getName());
-//                }
-//            } catch (GFacException e) {
-//                throw new GFacProviderException(e.getMessage(), e);
-//            }
-//        }
-//    }
-//
-//    private boolean verifyJobSubmissionByJobId(RemoteCluster remoteCluster, String jobID) throws SSHApiException {
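-//        // any non-null status other than U (the unknown state) is taken as evidence the scheduler accepted the job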
-//        JobStatus status = remoteCluster.getJobStatus(jobID);
-//        return status != null &&  status != JobStatus.U;
-//    }
-//
-//    private String verifyJobSubmission(RemoteCluster remoteCluster, JobDetails jobDetails) {
-//        String jobName = jobDetails.getJobName();
-//        String jobId = null;
-//        try {
-//            jobId = remoteCluster.getJobIdByJobName(jobName, remoteCluster.getServerInfo().getUserName());
-//        } catch (SSHApiException e) {
-//            log.error("Error while verifying JobId from JobName");
-//        }
-//        return jobId;
-//    }
-//
-//    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-//
-//    }
-//
-//    public boolean cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-//        JobDetails jobDetails = jobExecutionContext.getJobDetails();
-//        StringBuffer data = new StringBuffer();
-//        String hostAddress = jobExecutionContext.getHostName();
-//        if (!hpcType) {
-//            throw new NotImplementedException();
-//        } else {
-//            RemoteCluster remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-//            if (remoteCluster == null) {
-//                throw new GFacProviderException("Security context is not set properly");
-//            } else {
-//                log.info("Successfully retrieved the Security Context");
-//            }
-//            // This installed path is a mandatory field, because it can change based on the computing resource
-//            if (jobDetails == null) {
-//                log.error("There are no JobDetails, so the cancel request can't be performed !!!");
-//                return false;
-//            }
-//            try {
-//                if (jobDetails.getJobID() != null) {
-//                    if (remoteCluster.cancelJob(jobDetails.getJobID()) != null) {
-//                        // if this operation succeeds without any exceptions, we can assume the cancel operation succeeded.
-//                        GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
-//                        return true;
-//                    } else {
-//                        log.info("Job Cancel operation failed");
-//                    }
-//                } else {
-//                    log.error("No Job Id is set, so the cancel operation cannot be performed !!!");
-//                    throw new GFacProviderException("Cancel request failed to cancel job as JobId is null in Job Execution Context");
-//                }
-//            } catch (SSHApiException e) {
-//                String error = "Cancel request failed " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-//                log.error(error);
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-////                throw new GFacProviderException(error, e);
-//            } catch (Exception e) {
-//                String error = "Cancel request failed " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-//                log.error(error);
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-////                throw new GFacProviderException(error, e);
-//            }
-//            return false;
-//        }
-//    }
-//
-//    private File createShellScript(JobExecutionContext context) throws IOException {
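-//        // timestamp plus a random long keeps concurrent runs from colliding on the temp script name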
-//        String uniqueDir = jobExecutionContext.getApplicationName() + System.currentTimeMillis()
-//                + new Random().nextLong();
-//
-//        File shellScript = File.createTempFile(uniqueDir, "sh");
-//        OutputStream out = new FileOutputStream(shellScript);
-//
-//        out.write("#!/bin/bash\n".getBytes());
-//        out.write(("cd " + jobExecutionContext.getWorkingDir() + "\n").getBytes());
-//        out.write(("export " + GFacConstants.INPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getInputDir() + "\n").getBytes());
-//        out.write(("export " + GFacConstants.OUTPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getOutputDir() + "\n")
-//                .getBytes());
-//        // get the env of the host and the application
-//        List<SetEnvPaths> envPathList = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getSetEnvironment();
-//        for (SetEnvPaths setEnvPaths : envPathList) {
-//            log.debug("Env[" + setEnvPaths.getName() + "] = " + setEnvPaths.getValue());
-//            out.write(("export " + setEnvPaths.getName() + "=" + setEnvPaths.getValue() + "\n").getBytes());
-//        }
-//
-//        // prepare the command
-//        final String SPACE = " ";
-//        StringBuffer cmd = new StringBuffer();
-//        cmd.append(jobExecutionContext.getExecutablePath());
-//        cmd.append(SPACE);
-//
-//        MessageContext input = context.getInMessageContext();
-//        Map<String, Object> inputs = input.getParameters();
-//        Set<String> keys = inputs.keySet();
-//        for (String paramName : keys) {
-//            InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
-//            //if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-//            if (inputParamType.getType() == DataType.URI) {
-//                String value = inputParamType.getValue();
-//                cmd.append(value);
-//                cmd.append(SPACE);
-//            } else {
-//                String paramValue = inputParamType.getValue();
-//                cmd.append(paramValue);
-//                cmd.append(SPACE);
-//            }
-//        }
-//        // We redirect the error and stdout to remote files, they will be read
-//        // in later
-//        cmd.append(SPACE);
-//        cmd.append("1>");
-//        cmd.append(SPACE);
-//        cmd.append(jobExecutionContext.getStandardOutput());
-//        cmd.append(SPACE);
-//        cmd.append("2>");
-//        cmd.append(SPACE);
-//        cmd.append(jobExecutionContext.getStandardError());
-//
-//        String cmdStr = cmd.toString();
-//        log.info("Command = " + cmdStr);
-//        out.write((cmdStr + "\n").getBytes());
-//        String message = "\"execuationSuceeded\"";
-//        out.write(("echo " + message + "\n").getBytes());
-//        out.close();
-//
-//        return shellScript;
-//    }
-//
-//    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-//
-//    }
-//
-//    /**
-//     * This method reads the standard output and, if any is available, returns it
-//     *
-//     * @param jobIDReaderCommandOutput
-//     * @param errorMsg
-//     * @return
-//     * @throws SSHApiException
-//     */
-//    private String getOutputifAvailable(StandardOutReader jobIDReaderCommandOutput, String errorMsg) throws SSHApiException {
-//        String stdOutputString = jobIDReaderCommandOutput.getStdOutputString();
-//        String stdErrorString = jobIDReaderCommandOutput.getStdErrorString();
-//
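-//        // empty stdout or any stderr content is treated as a submission failure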
-//        if (stdOutputString == null || stdOutputString.isEmpty() || (stdErrorString != null && !stdErrorString.isEmpty())) {
-//            log.error("Standard Error output : " + stdErrorString);
-//            throw new SSHApiException(errorMsg + stdErrorString);
-//        }
-//        return stdOutputString;
-//    }
-//
-//    public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-//        // have to implement the logic to recover a gfac failure
-//        initialize(jobExecutionContext);
-//        if(hpcType) {
-//            log.info("Invoking recovery for the experiment: " + jobExecutionContext.getExperimentID());
-//            String hostName = jobExecutionContext.getHostName();
-//            String jobId = "";
-//            String jobDesc = "";
-//            String jobName = "";
-//            try {
-//                String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
-//                String[] split = pluginData.split(",");
-//                if (split.length < 2) {
-//                    this.execute(jobExecutionContext);
-//                    return;
-//                }
-//                jobDesc = split[0].substring(8);
-//                jobId = split[1].substring(6);
-//                try {
-//                    JobDescriptor jobDescriptor = JobDescriptor.fromXML(jobDesc);
-//                    jobName = jobDescriptor.getJobName();
-//                } catch (XmlException e) {
-//                    log.error(e.getMessage(), e);
-//                    log.error("Cannot parse the stored plugin data, but trying to recover");
-//
-//                }
-//                log.info("The following data has been recovered:");
-//                log.info("Job Description: " + jobDesc);
-//                log.info("Job Id: " + jobId);
-//                if (jobName.isEmpty() || jobId.isEmpty() || "none".equals(jobId) ||
-//                        "".equals(jobId)) {
-//                    log.info("Cannot recover the data, so submitting the job again !!!");
-//                    this.execute(jobExecutionContext);
-//                    return;
-//                }
-//            } catch (Exception e) {
-//                log.error("Error while recovering provider", e);
-//            }
-//            try {
-//                // Now we have enough data to recover
-//                JobDetails jobDetails = new JobDetails();
-//                jobDetails.setJobDescription(jobDesc);
-//                jobDetails.setJobID(jobId);
-//                jobDetails.setJobName(jobName);
-//                jobExecutionContext.setJobDetails(jobDetails);
-//                if (jobExecutionContext.getSecurityContext(hostName) == null) {
-//                    try {
-//                        GFACSSHUtils.addSecurityContext(jobExecutionContext);
-//                    } catch (ApplicationSettingsException e) {
-//                        log.error(e.getMessage());
-//                        throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-//                    }
-//                }
-//                monitor(jobExecutionContext);
-//            } catch (Exception e) {
-//                log.error("Error while recovering the job", e);
-//                throw new GFacProviderException("Error delegating an already-run job to monitoring", e);
-//            }
-//        } else {
-//            log.info("We do not handle non-HPC recovery, so we simply run the job directly");
-//            this.execute(jobExecutionContext);
-//        }
-//    }
-//
-//    @Override
-//    public void monitor(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-//        if (jobExecutionContext.getPreferredJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
-//            String jobSubmissionInterfaceId = jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId();
-//            SSHJobSubmission sshJobSubmission = null;
-//            try {
-//                sshJobSubmission = jobExecutionContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
-//            } catch (AppCatalogException e) {
-//                throw new GFacException("Error while reading compute resource", e);
-//            }
-//            MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
-//            if (monitorMode != null && monitorMode == MonitorMode.JOB_EMAIL_NOTIFICATION_MONITOR) {
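-//                // email-based monitoring watches for the scheduler's job notification mails instead of polling the resource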
-//                try {
-//                    EmailBasedMonitor emailBasedMonitor = EmailMonitorFactory.getEmailBasedMonitor(
-//                            sshJobSubmission.getResourceJobManager().getResourceJobManagerType());
-//                    emailBasedMonitor.addToJobMonitorMap(jobExecutionContext);
-//                } catch (AiravataException e) {
-//                    throw new GFacHandlerException("Error while activating email job monitoring ", e);
-//                }
-//                return;
-//            }
-//        } else {
-//            throw new IllegalArgumentException("Monitoring is implemented only for SSH, "
-//                    + jobExecutionContext.getPreferredJobSubmissionProtocol().name() + " is not yet implemented");
-//        }
-//
-//    }
-//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
deleted file mode 100644
index 9532c53..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
+++ /dev/null
@@ -1,562 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.ssh.util;
-//
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.gfac.impl.HPCRemoteCluster;
-//import org.apache.airavata.registry.cpi.AppCatalog;
-//import org.apache.airavata.registry.cpi.AppCatalogException;
-//import org.apache.airavata.common.exception.ApplicationSettingsException;
-//import org.apache.airavata.common.utils.ServerSettings;
-//import org.apache.airavata.credential.store.credential.impl.ssh.SSHCredential;
-//import org.apache.airavata.gfac.core.GFacConstants;
-//import org.apache.airavata.gfac.core.GFacException;
-//import org.apache.airavata.gfac.core.RequestData;
-//import org.apache.airavata.gfac.core.JobDescriptor;
-//import org.apache.airavata.gfac.core.JobManagerConfiguration;
-//import org.apache.airavata.gfac.core.cluster.ServerInfo;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.context.MessageContext;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.gsi.ssh.impl.GSISSHAbstractCluster;
-//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-//import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;
-//import org.apache.airavata.gfac.ssh.context.SSHAuthWrapper;
-//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.security.TokenizedSSHAuthInfo;
-//import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-//import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
-//import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
-//import org.apache.airavata.model.appcatalog.appinterface.DataType;
-//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-//import org.apache.airavata.model.appcatalog.computeresource.*;
-//import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
-//import org.apache.airavata.model.experiment.ComputationalResourceScheduling;
-//import org.apache.airavata.model.experiment.CorrectiveAction;
-//import org.apache.airavata.model.experiment.ErrorCategory;
-//import org.apache.airavata.model.experiment.TaskDetails;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.io.File;
-//import java.io.PrintWriter;
-//import java.io.StringWriter;
-//import java.util.*;
-//
-//public class GFACSSHUtils {
-//    private final static Logger logger = LoggerFactory.getLogger(GFACSSHUtils.class);
-//
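-//    // pool of reusable SSH connections keyed by user + host + port; each pool grows to maxClusterCount before connections are reused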
-//    public static Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();
-//
-//    public static final String PBS_JOB_MANAGER = "pbs";
-//    public static final String SLURM_JOB_MANAGER = "slurm";
-//    public static final String SUN_GRID_ENGINE_JOB_MANAGER = "UGE";
-//    public static final String LSF_JOB_MANAGER = "LSF";
-//
-//    public static int maxClusterCount = 5;
-//
-//    /**
-//     * This method adds computing-resource-specific authentication; if it's a third-party machine, use the other addSecurityContext
-//     * @param jobExecutionContext
-//     * @throws GFacException
-//     * @throws ApplicationSettingsException
-//     */
-//    public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
-//        JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
-//        JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
-//        if (preferredJobSubmissionProtocol == JobSubmissionProtocol.GLOBUS || preferredJobSubmissionProtocol == JobSubmissionProtocol.UNICORE) {
-//            logger.error("This is the wrong method to invoke for non-SSH host types; please check your gfac-config.xml");
-//        } else if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH) {
-//            try {
-//                AppCatalog appCatalog = jobExecutionContext.getAppCatalog();
-//                SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
-//                SecurityProtocol securityProtocol = sshJobSubmission.getSecurityProtocol();
-//                if (securityProtocol == SecurityProtocol.GSI || securityProtocol == SecurityProtocol.SSH_KEYS) {
-//                    SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-//                    String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
-//                    RequestData requestData = new RequestData(jobExecutionContext.getGatewayID());
-//                    requestData.setTokenId(credentialStoreToken);
-//
-//                    ServerInfo serverInfo = new ServerInfo(null, jobExecutionContext.getHostName());
-//
-//                    RemoteCluster pbsRemoteCluster = null;
-//                    try {
-//                        AuthenticationInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
-//                        String installedParentPath = jobExecutionContext.getResourceJobManager().getJobManagerBinPath();
-//                        if (installedParentPath == null) {
-//                            installedParentPath = "/";
-//                        }
-//
-//                        SSHCredential credentials = ((TokenizedSSHAuthInfo) tokenizedSSHAuthInfo).getCredentials(); // this call fetches the credentials and caches them in this object; the data is used below
-//                        if(credentials.getPrivateKey()==null || credentials.getPublicKey()==null){
-//                            // now we fall back to username/password authentication
-//                            Properties configurationProperties = ServerSettings.getProperties();
-//                            tokenizedSSHAuthInfo = new DefaultPasswordAuthenticationInfo(configurationProperties.getProperty(GFacConstants.SSH_PASSWORD));
-//                        }
-//                        // This should be the login user name from compute resource preference
-//                        String loginUser = jobExecutionContext.getLoginUserName();
-//                        if (loginUser == null) {
-//                            loginUser = credentials.getPortalUserName();
-//                        }
-//                        serverInfo.setUserName(loginUser);
-//                        jobExecutionContext.getExperiment().setUserName(loginUser);
-//
-//
-//                        // inside the pbsCluster object
-//
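-//                        // connections are pooled per user + host + port; this concatenation is the pool key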
-//                        String key = loginUser + jobExecutionContext.getHostName() + serverInfo.getPort();
-//                        boolean recreate = false;
-//                        synchronized (clusters) {
-//                            if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-//                                recreate = true;
-//                            } else if (clusters.containsKey(key)) {
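-//                                // the pool for this key is full, so pick one of the pooled connections at random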
-//                                int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-//                                if (clusters.get(key).get(i).getSession().isConnected()) {
-//                                    pbsRemoteCluster = clusters.get(key).get(i);
-//                                } else {
-//                                    clusters.get(key).remove(i);
-//                                    recreate = true;
-//                                }
-//                                if (!recreate) {
-//                                    try {
-//                                        pbsRemoteCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try a real operation; if it works we are good, else we recreate
-//                                    } catch (Exception e) {
-//                                        clusters.get(key).remove(i);
-//                                        logger.info("Connection found in the connection map has expired, so we create one from scratch");
-//                                        maxClusterCount++;
-//                                        recreate = true; // we recreate the pbsRemoteCluster if there is any exception during connection
-//                                    }
-//                                }
-//                                logger.info("Re-using an existing connection for the connection string: " + key);
-//                            } else {
-//                                recreate = true;
-//                            }
-//                            if (recreate) {
-//                            	 JobManagerConfiguration jConfig = null;
-//                                 String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
-//                                 if (jobManager == null) {
-//                                     logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
-//                                     jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-//                                 } else {
-//                                     if (PBS_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-//                                         jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-//                                     } else if (SLURM_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-//                                         jConfig = CommonUtils.getSLURMJobManager(installedParentPath);
-//                                     } else if (SUN_GRID_ENGINE_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-//                                         jConfig = CommonUtils.getUGEJobManager(installedParentPath);
-//                                     } else if (LSF_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-//                                         jConfig = CommonUtils.getLSFJobManager(installedParentPath);
-//                                     }
-//                                 }
-//
-//                                pbsRemoteCluster = new HPCRemoteCluster(serverInfo, tokenizedSSHAuthInfo,jConfig);
-//                                List<RemoteCluster> pbsRemoteClusters = null;
-//                                if (!(clusters.containsKey(key))) {
-//                                    pbsRemoteClusters = new ArrayList<RemoteCluster>();
-//                                } else {
-//                                    pbsRemoteClusters = clusters.get(key);
-//                                }
-//                                pbsRemoteClusters.add(pbsRemoteCluster);
-//                                clusters.put(key, pbsRemoteClusters);
-//                            }
-//                        }
-//                    } catch (Exception e) {
-//                        throw new GFacException("Error occurred while setting up the SSH security context", e);
-//                    }
-//                    sshSecurityContext.setRemoteCluster(pbsRemoteCluster);
-//                    jobExecutionContext.addSecurityContext(jobExecutionContext.getHostName(), sshSecurityContext);
-//                }
-//            } catch (AppCatalogException e) {
-//                throw new GFacException("Error while getting SSH Submission object from app catalog", e);
-//            }
-//        }
-//    }
-//
-//    /**
-//     * This method can be used to add third party resource security contexts
-//     * @param jobExecutionContext
-//     * @param sshAuth
-//     * @throws GFacException
-//     * @throws ApplicationSettingsException
-//     */
-//    public static void addSecurityContext(JobExecutionContext jobExecutionContext,SSHAuthWrapper sshAuth) throws GFacException, ApplicationSettingsException {
-//        try {
-//            if(sshAuth== null) {
-//                throw new GFacException("Error adding security Context, because sshAuthWrapper is null");
-//            }
-//            SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-//            AppCatalog appCatalog = jobExecutionContext.getAppCatalog();
-//            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
-//            SSHJobSubmission sshJobSubmission = null;
-//			try {
-//				sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
-//			} catch (Exception e1) {
-//				 logger.error("Not able to get SSHJobSubmission from registry");
-//			}
-//
-//            RemoteCluster pbsRemoteCluster = null;
-//            String key=sshAuth.getKey();
-//            boolean recreate = false;
-//            synchronized (clusters) {
-//                if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-//                    recreate = true;
-//                } else if (clusters.containsKey(key)) {
-//                    int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-//                    if (clusters.get(key).get(i).getSession().isConnected()) {
-//                        pbsRemoteCluster = clusters.get(key).get(i);
-//                    } else {
-//                        clusters.get(key).remove(i);
-//                        recreate = true;
-//                    }
-//                    if (!recreate) {
-//                        try {
-//                            pbsRemoteCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try a real operation; if it works we are good, else we recreate
-//                        } catch (Exception e) {
-//                            clusters.get(key).remove(i);
-//                            logger.info("Connection found in the connection map has expired, so we create one from scratch");
-//                            maxClusterCount++;
-//                            recreate = true; // we recreate the pbsRemoteCluster if there is any exception during connection
-//                        }
-//                    }
-//                    logger.info("Re-using an existing connection for the connection string: " + key);
-//                } else {
-//                    recreate = true;
-//                }
-//                if (recreate) {
-//               	 JobManagerConfiguration jConfig = null;
-//               	 String installedParentPath = null;
-//               	 if(jobExecutionContext.getResourceJobManager()!= null){
-//               		installedParentPath = jobExecutionContext.getResourceJobManager().getJobManagerBinPath();
-//               	 }
-//                 if (installedParentPath == null) {
-//                     installedParentPath = "/";
-//                 }
-//					if (sshJobSubmission != null) {
-//						String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
-//						if (jobManager == null) {
-//							logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
-//							jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-//						} else {
-//							if (PBS_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-//								jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-//							} else if (SLURM_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-//								jConfig = CommonUtils.getSLURMJobManager(installedParentPath);
-//							} else if (SUN_GRID_ENGINE_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-//								jConfig = CommonUtils.getUGEJobManager(installedParentPath);
-//							} else if (LSF_JOB_MANAGER.equalsIgnoreCase(jobManager)) { // equalsIgnoreCase for consistency with the other job manager checks
-//								jConfig = CommonUtils.getLSFJobManager(installedParentPath);
-//							}
-//						}
-//					}
-//                    pbsRemoteCluster = new HPCRemoteCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),jConfig);
-//                    key = sshAuth.getKey();
-//                    List<RemoteCluster> pbsRemoteClusters = null;
-//                    if (!(clusters.containsKey(key))) {
-//                        pbsRemoteClusters = new ArrayList<RemoteCluster>();
-//                    } else {
-//                        pbsRemoteClusters = clusters.get(key);
-//                    }
-//                    pbsRemoteClusters.add(pbsRemoteCluster);
-//                    clusters.put(key, pbsRemoteClusters);
-//                }
-//            }
-//            sshSecurityContext.setRemoteCluster(pbsRemoteCluster);
-//            jobExecutionContext.addSecurityContext(key, sshSecurityContext);
-//        } catch (Exception e) {
-//            logger.error(e.getMessage(), e);
-//            throw new GFacException("Error adding security Context", e);
-//        }
-//    }
-//
-//
-//    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) throws AppCatalogException, ApplicationSettingsException {
-//        JobDescriptor jobDescriptor = new JobDescriptor();
-//        TaskDetails taskData = jobExecutionContext.getTaskData();
-//
-//
-//        // set the email-based job monitoring address if the monitor mode is JOB_EMAIL_NOTIFICATION_MONITOR
-//        boolean addJobNotifMail = isEmailBasedJobMonitor(jobExecutionContext);
-//        String emailIds = null;
-//        if (addJobNotifMail) {
-//            emailIds = ServerSettings.getEmailBasedMonitorAddress();
-//        }
-//        // add all configured job notification email addresses.
-//        if (ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_ENABLE).equalsIgnoreCase("true")) {
-//            String flags = ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_FLAGS);
-//            if (flags != null && jobExecutionContext.getApplicationContext().getComputeResourceDescription().getHostName().equals("stampede.tacc.xsede.org")) {
-//                flags = "ALL";
-//            }
-//            jobDescriptor.setMailOptions(flags);
-//
-//            String userJobNotifEmailIds = ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_EMAILIDS);
-//            if (userJobNotifEmailIds != null && !userJobNotifEmailIds.isEmpty()) {
-//                if (emailIds != null && !emailIds.isEmpty()) {
-//                    emailIds += ("," + userJobNotifEmailIds);
-//                } else {
-//                    emailIds = userJobNotifEmailIds;
-//                }
-//            }
-//
-//            if (taskData.isEnableEmailNotification()) {
-//                List<String> emailList = jobExecutionContext.getTaskData().getEmailAddresses();
-//                String elist = GFacUtils.listToCsv(emailList, ',');
-//                if (elist != null && !elist.isEmpty()) {
-//                    if (emailIds != null && !emailIds.isEmpty()) {
-//                        emailIds = emailIds + "," + elist;
-//                    } else {
-//                        emailIds = elist;
-//                    }
-//                }
-//            }
-//        }
-//        if (emailIds != null && !emailIds.isEmpty()) {
-//            logger.info("Email list: " + emailIds);
-//            jobDescriptor.setMailAddress(emailIds);
-//        }
-//        // this is common for any application descriptor
-//
-//        jobDescriptor.setCallBackIp(ServerSettings.getIp());
-//        jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
-//        jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
-//        jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
-//        jobDescriptor.setExecutablePath(jobExecutionContext.getApplicationContext()
-//                .getApplicationDeploymentDescription().getExecutablePath());
-//        jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
-//        jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
-//        String computationalProjectAccount = taskData.getTaskScheduling().getComputationalProjectAccount();
-//        if (computationalProjectAccount == null){
-//            ComputeResourcePreference computeResourcePreference = jobExecutionContext.getApplicationContext().getComputeResourcePreference();
-//            if (computeResourcePreference != null) {
-//                computationalProjectAccount = computeResourcePreference.getAllocationProjectNumber();
-//            }
-//        }
-//        if (computationalProjectAccount != null) {
-//            jobDescriptor.setAcountString(computationalProjectAccount);
-//        }
-//        // To make the job name alphanumeric
-//        jobDescriptor.setJobName("A" + String.valueOf(generateJobName()));
-//        jobDescriptor.setWorkingDirectory(jobExecutionContext.getWorkingDir());
-//
-//        List<String> inputValues = new ArrayList<String>();
-//        MessageContext input = jobExecutionContext.getInMessageContext();
-//
-//        // sort the inputs first and then build the command list
-//        Comparator<InputDataObjectType> inputOrderComparator = new Comparator<InputDataObjectType>() {
-//            @Override
-//            public int compare(InputDataObjectType inputDataObjectType, InputDataObjectType t1) {
-//                return inputDataObjectType.getInputOrder() - t1.getInputOrder();
-//            }
-//        };
-//        Set<InputDataObjectType> sortedInputSet = new TreeSet<InputDataObjectType>(inputOrderComparator);
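-//        // note: TreeSet treats compare() == 0 as equality, so two inputs sharing an inputOrder collapse into one entry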
-//        for (Object object : input.getParameters().values()) {
-//            if (object instanceof InputDataObjectType) {
-//                InputDataObjectType inputDOT = (InputDataObjectType) object;
-//                sortedInputSet.add(inputDOT);
-//            }
-//        }
-//        for (InputDataObjectType inputDataObjectType : sortedInputSet) {
-//            if (!inputDataObjectType.isRequiredToAddedToCommandLine()) {
-//                continue;
-//            }
-//            if (inputDataObjectType.getApplicationArgument() != null
-//                    && !inputDataObjectType.getApplicationArgument().equals("")) {
-//                inputValues.add(inputDataObjectType.getApplicationArgument());
-//            }
-//
-//            if (inputDataObjectType.getValue() != null
-//                    && !inputDataObjectType.getValue().equals("")) {
-//                if (inputDataObjectType.getType() == DataType.URI) {
-//                    // set only the relative path
-//                    String filePath = inputDataObjectType.getValue();
-//                    filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
-//                    inputValues.add(filePath);
-//                }else {
-//                    inputValues.add(inputDataObjectType.getValue());
-//                }
-//
-//            }
-//        }
-//        Map<String, Object> outputParams = jobExecutionContext.getOutMessageContext().getParameters();
-//        for (Object outputParam : outputParams.values()) {
-//            if (outputParam instanceof OutputDataObjectType) {
-//                OutputDataObjectType output = (OutputDataObjectType) outputParam;
-//                if (output.getApplicationArgument() != null
-//                        && !output.getApplicationArgument().equals("")) {
-//                    inputValues.add(output.getApplicationArgument());
-//                }
-//                if (output.getValue() != null && !output.getValue().equals("") && output.isRequiredToAddedToCommandLine()) {
-//                    if (output.getType() == DataType.URI){
-//                        String filePath = output.getValue();
-//                        filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
-//                        inputValues.add(filePath);
-//                    }
-//                }
-//            }
-//        }
-//
-//        jobDescriptor.setInputValues(inputValues);
-//        jobDescriptor.setUserName(((GSISSHAbstractCluster) remoteCluster).getServerInfo().getUserName());
-//        jobDescriptor.setShellName("/bin/bash");
-//        jobDescriptor.setAllEnvExport(true);
-//        jobDescriptor.setOwner(((HPCRemoteCluster) remoteCluster).getServerInfo().getUserName());
-//
-//        ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
-//
-//
-//        ComputationalResourceScheduling taskScheduling = taskData.getTaskScheduling();
-//        if (taskScheduling != null) {
-//            int totalNodeCount = taskScheduling.getNodeCount();
-//            int totalCPUCount = taskScheduling.getTotalCPUCount();
-//
-//
-//            if (taskScheduling.getComputationalProjectAccount() != null) {
-//                jobDescriptor.setAcountString(taskScheduling.getComputationalProjectAccount());
-//            }
-//            if (taskScheduling.getQueueName() != null) {
-//                jobDescriptor.setQueueName(taskScheduling.getQueueName());
-//            }
-//
-//            if (totalNodeCount > 0) {
-//                jobDescriptor.setNodes(totalNodeCount);
-//            }
-//            if (taskScheduling.getComputationalProjectAccount() != null) {
-//                jobDescriptor.setAcountString(taskScheduling.getComputationalProjectAccount());
-//            }
-//            if (taskScheduling.getQueueName() != null) {
-//                jobDescriptor.setQueueName(taskScheduling.getQueueName());
-//            }
-//            if (totalCPUCount > 0) {
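-//                // integer division; assumes totalNodeCount > 0, otherwise this line throws ArithmeticException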
-//                int ppn = totalCPUCount / totalNodeCount;
-//                jobDescriptor.setProcessesPerNode(ppn);
-//                jobDescriptor.setCPUCount(totalCPUCount);
-//            }
-//            if (taskScheduling.getWallTimeLimit() > 0) {
-//                jobDescriptor.setMaxWallTime(String.valueOf(taskScheduling.getWallTimeLimit()));
-//                if(resourceJobManager.getResourceJobManagerType().equals(ResourceJobManagerType.LSF)){
-//                    jobDescriptor.setMaxWallTimeForLSF(String.valueOf(taskScheduling.getWallTimeLimit()));
-//                }
-//            }
-//            if (taskScheduling.getTotalPhysicalMemory() > 0) {
-//                jobDescriptor.setUsedMemory(taskScheduling.getTotalPhysicalMemory() + "");
-//            }
-//        } else {
-//            logger.error("Task scheduling cannot be null at this point..");
-//        }
-//        ApplicationDeploymentDescription appDepDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-//        List<String> moduleCmds = appDepDescription.getModuleLoadCmds();
-//        if (moduleCmds != null) {
-//            for (String moduleCmd : moduleCmds) {
-//                jobDescriptor.addModuleLoadCommands(moduleCmd);
-//            }
-//        }
-//        List<String> preJobCommands = appDepDescription.getPreJobCommands();
-//        if (preJobCommands != null) {
-//            for (String preJobCommand : preJobCommands) {
-//                jobDescriptor.addPreJobCommand(parseCommand(preJobCommand, jobExecutionContext));
-//            }
-//        }
-//
-//        List<String> postJobCommands = appDepDescription.getPostJobCommands();
-//        if (postJobCommands != null) {
-//            for (String postJobCommand : postJobCommands) {
-//                jobDescriptor.addPostJobCommand(parseCommand(postJobCommand, jobExecutionContext));
-//            }
-//        }
-//
-//        ApplicationParallelismType parallelism = appDepDescription.getParallelism();
-//        if (parallelism != null){
-//            if (parallelism == ApplicationParallelismType.MPI || parallelism == ApplicationParallelismType.OPENMP || parallelism == ApplicationParallelismType.OPENMP_MPI){
-//                Map<JobManagerCommand, String> jobManagerCommands = resourceJobManager.getJobManagerCommands();
-//                if (jobManagerCommands != null && !jobManagerCommands.isEmpty()) {
-//                    for (JobManagerCommand command : jobManagerCommands.keySet()) {
-//                        if (command == JobManagerCommand.SUBMISSION) {
-//                            String commandVal = jobManagerCommands.get(command);
-//                            jobDescriptor.setJobSubmitter(commandVal);
-//                        }
-//                    }
-//                }
-//            }
-//        }
-//        return jobDescriptor;
-//    }
-//
-//    public static boolean isEmailBasedJobMonitor(JobExecutionContext jobExecutionContext) throws AppCatalogException {
-//        if (jobExecutionContext.getPreferredJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
-//            String jobSubmissionInterfaceId = jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId();
-//            SSHJobSubmission sshJobSubmission = jobExecutionContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
-//            MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
-//            return monitorMode != null && monitorMode == MonitorMode.JOB_EMAIL_NOTIFICATION_MONITOR;
-//        } else {
-//            return false;
-//        }
-//    }
-//
-//    private static int generateJobName() {
-//        Random random = new Random();
-//        int i = random.nextInt(Integer.MAX_VALUE);
-//        i = i + 99999999;
-//        if(i<0) {
-//            i = i * (-1);
-//        }
-//        return i;
-//    }
-//
-//    private static String parseCommand(String value, JobExecutionContext jobExecutionContext) {
-//        String parsedValue = value.replaceAll("\\$workingDir", jobExecutionContext.getWorkingDir());
-//        parsedValue = parsedValue.replaceAll("\\$inputDir", jobExecutionContext.getInputDir());
-//        parsedValue = parsedValue.replaceAll("\\$outputDir", jobExecutionContext.getOutputDir());
-//        return parsedValue;
-//    }
-//    /**
-//     * This method can be used to set the Security Context if its not set and later use it in other places
-//     * @param jobExecutionContext
-//     * @param authenticationInfo
-//     * @param userName
-//     * @param hostName
-//     * @param port
-//     * @return
-//     * @throws GFacException
-//     */
-//    public static String prepareSecurityContext(JobExecutionContext jobExecutionContext, AuthenticationInfo authenticationInfo
-//            , String userName, String hostName, int port) throws GFacException {
-//        ServerInfo serverInfo = new ServerInfo(userName, hostName);
-//        String key = userName+hostName+port;
-//        SSHAuthWrapper sshAuthWrapper = new SSHAuthWrapper(serverInfo, authenticationInfo, key);
-//        if (jobExecutionContext.getSecurityContext(key) == null) {
-//            try {
-//                GFACSSHUtils.addSecurityContext(jobExecutionContext, sshAuthWrapper);
-//            } catch (ApplicationSettingsException e) {
-//                logger.error(e.getMessage());
-//                try {
-//                    StringWriter errors = new StringWriter();
-//                    e.printStackTrace(new PrintWriter(errors));
-//                    GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//                } catch (GFacException e1) {
-//                    logger.error(e1.getLocalizedMessage());
-//                }
-//                throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-//            }
-//        }
-//        return key;
-//    }
-//}
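
For reference, the deleted SSHProvider expanded $workingDir, $inputDir and
$outputDir placeholders in pre- and post-job commands via the parseCommand(...)
helper above. A minimal standalone sketch of that substitution pattern follows;
the class name and directory values are illustrative, not Airavata APIs, and
Matcher.quoteReplacement is a small hardening over the deleted code, which
passed the raw paths straight into replaceAll:

    import java.util.regex.Matcher;

    public class CommandParser {

        // Replaces $workingDir, $inputDir and $outputDir tokens in a
        // pre/post job command with concrete paths. quoteReplacement
        // guards against '$' or '\' inside the substituted paths.
        static String parseCommand(String value, String workingDir,
                                   String inputDir, String outputDir) {
            String parsed = value.replaceAll("\\$workingDir", Matcher.quoteReplacement(workingDir));
            parsed = parsed.replaceAll("\\$inputDir", Matcher.quoteReplacement(inputDir));
            parsed = parsed.replaceAll("\\$outputDir", Matcher.quoteReplacement(outputDir));
            return parsed;
        }

        public static void main(String[] args) {
            System.out.println(parseCommand("cp $inputDir/in.dat $workingDir",
                    "/scratch/job42", "/scratch/job42/input", "/scratch/job42/output"));
            // -> cp /scratch/job42/input/in.dat /scratch/job42
        }
    }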

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
deleted file mode 100644
index cadb251..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
+++ /dev/null
@@ -1,96 +0,0 @@
-//package org.apache.airavata.gfac.ssh.util;
-//
-//import java.io.File;
-//import java.util.ArrayList;
-//import java.util.Arrays;
-//import java.util.List;
-//
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.model.appcatalog.appinterface.DataType;
-//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-///**
-// * To handle outputs of different data types
-// *
-// */
-//public class HandleOutputs {
-//	private static final Logger log = LoggerFactory.getLogger(HandleOutputs.class);
-//
-//	public static List<OutputDataObjectType> handleOutputs(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) throws GFacHandlerException {
-//		List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-//		try {
-//			String outputDataDir = File.separator + "tmp" + File.separator + jobExecutionContext.getExperimentID();
-//			(new File(outputDataDir)).mkdirs();
-//
-//			List<OutputDataObjectType> outputs = jobExecutionContext.getTaskData().getApplicationOutputs();
-//			List<String> outputList = remoteCluster.listDirectory(jobExecutionContext.getWorkingDir());
-//			boolean missingOutput = false;
-//
-//			for (OutputDataObjectType output : outputs) {
-//				// FIXME: Validation of outputs based on required and optional and search based on REGEX provided in search.
-//
-//				if (DataType.URI == output.getType()) {
-//                    // for failed jobs outputs are not generated. So we should not download outputs
-//                    if (GFacUtils.isFailedJob(jobExecutionContext)){
-//                       continue;
-//                    }
-//					String outputFile = output.getValue();
-//					String fileName = outputFile.substring(outputFile.lastIndexOf(File.separatorChar) + 1, outputFile.length());
-//
-//					if (output.getLocation() == null && !outputList.contains(fileName) && output.isIsRequired()) {
-//						missingOutput = true;
-//					} else {
-//						remoteCluster.scpFrom(outputFile, outputDataDir);
-//						String localFile = outputDataDir + File.separator + fileName;
-//						jobExecutionContext.addOutputFile(localFile);
-//						output.setValue(localFile);
-//						outputArray.add(output);
-//					}
-//
-//				} else if (DataType.STDOUT == output.getType()) {
-//					String downloadFile = jobExecutionContext.getStandardOutput();
-//					String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-//					remoteCluster.scpFrom(downloadFile, outputDataDir);
-//					String localFile = outputDataDir + File.separator + fileName;
-//					jobExecutionContext.addOutputFile(localFile);
-//					jobExecutionContext.setStandardOutput(localFile);
-//					output.setValue(localFile);
-//					outputArray.add(output);
-//
-//				} else if (DataType.STDERR == output.getType()) {
-//					String downloadFile = jobExecutionContext.getStandardError();
-//					String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-//					remoteCluster.scpFrom(downloadFile, outputDataDir);
-//					String localFile = outputDataDir + File.separator + fileName;
-//					jobExecutionContext.addOutputFile(localFile);
-//					jobExecutionContext.setStandardError(localFile);
-//					output.setValue(localFile);
-//					outputArray.add(output);
-//
-//				}
-//			}
-//			if (outputArray == null || outputArray.isEmpty()) {
-//				log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
-//				if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null) {
-//					throw new GFacHandlerException("Empty Output returned from the Application, Double check the application"
-//							+ "and ApplicationDescriptor output Parameter Names");
-//				}
-//			}
-//
-//			if (missingOutput) {
-//				String arrayString = Arrays.deepToString(outputArray.toArray());
-//				log.error(arrayString);
-//				throw new GFacHandlerException("Required output is missing");
-//			}
-//		} catch (Exception e) {
-//			throw new GFacHandlerException(e);
-//		}
-//		jobExecutionContext.getTaskData().setApplicationOutputs(outputArray);
-//		return outputArray;
-//	}
-//}
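
The deleted handleOutputs(...) above repeatedly derives a local file name from a
remote path with substring(lastIndexOf(File.separatorChar) + 1) before staging
the file via scpFrom. A tiny sketch of that idiom with hypothetical paths (like
the deleted code, it assumes the platform separator matches the remote one):

    import java.io.File;

    public class OutputNames {

        // Returns the last path segment, e.g. "/home/user/job/stdout.txt"
        // -> "stdout.txt"; if no separator is found, the whole path is kept.
        static String fileName(String path) {
            return path.substring(path.lastIndexOf(File.separatorChar) + 1);
        }

        public static void main(String[] args) {
            String outputDataDir = File.separator + "tmp" + File.separator + "exp1";
            String remoteFile = "/home/user/job/stdout.txt";
            System.out.println(outputDataDir + File.separator + fileName(remoteFile));
            // -> /tmp/exp1/stdout.txt on a Unix-like platform
        }
    }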


[2/2] airavata git commit: Deleted unused classes

Posted by sh...@apache.org.
Deleted unused classes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/08cdad26
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/08cdad26
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/08cdad26

Branch: refs/heads/master
Commit: 08cdad2645ffcea9b499a23afe408d9c2bd2896f
Parents: 140d9bd
Author: Shameera Rathnayaka <sh...@gmail.com>
Authored: Mon Nov 9 10:59:30 2015 -0500
Committer: Shameera Rathnayaka <sh...@gmail.com>
Committed: Mon Nov 9 10:59:30 2015 -0500

----------------------------------------------------------------------
 .../gfac/impl/AiravataJobStatusUpdator.java     | 120 ----
 .../gfac/impl/AiravataProcessStatusUpdator.java | 125 -----
 .../gfac/impl/AiravataTaskStatusUpdator.java    | 159 ------
 .../ssh/handler/AdvancedSCPInputHandler.java    | 229 --------
 .../ssh/handler/AdvancedSCPOutputHandler.java   | 225 --------
 .../gfac/ssh/handler/NewSSHOutputHandler.java   |  78 ---
 .../ssh/handler/SSHDirectorySetupHandler.java   | 119 ----
 .../gfac/ssh/handler/SSHInputHandler.java       | 198 -------
 .../gfac/ssh/handler/SSHOutputHandler.java      | 256 ---------
 .../gfac/ssh/provider/impl/SSHProvider.java     | 473 ----------------
 .../airavata/gfac/ssh/util/GFACSSHUtils.java    | 562 -------------------
 .../airavata/gfac/ssh/util/HandleOutputs.java   |  96 ----
 12 files changed, 2640 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataJobStatusUpdator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataJobStatusUpdator.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataJobStatusUpdator.java
deleted file mode 100644
index e029324..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataJobStatusUpdator.java
+++ /dev/null
@@ -1,120 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.impl;
-//
-//import com.google.common.eventbus.Subscribe;
-//import org.apache.airavata.common.utils.AiravataUtils;
-//import org.apache.airavata.common.utils.LocalEventPublisher;
-//import org.apache.airavata.common.utils.listener.AbstractActivityListener;
-//import org.apache.airavata.messaging.core.MessageContext;
-//import org.apache.airavata.messaging.core.Publisher;
-//import org.apache.airavata.model.job.JobModel;
-//import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-//import org.apache.airavata.model.messaging.event.JobStatusChangeRequestEvent;
-//import org.apache.airavata.model.messaging.event.MessageType;
-//import org.apache.airavata.model.status.JobState;
-//import org.apache.airavata.model.status.JobStatus;
-//import org.apache.airavata.registry.cpi.CompositeIdentifier;
-//import org.apache.airavata.registry.cpi.ExperimentCatalog;
-//import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.util.Calendar;
-//
-//public class AiravataJobStatusUpdator implements AbstractActivityListener {
-//    private final static Logger logger = LoggerFactory.getLogger(AiravataJobStatusUpdator.class);
-//    private ExperimentCatalog airavataExperimentCatalog;
-//
-//    private LocalEventPublisher localEventPublisher;
-//    private Publisher publisher;
-//
-//
-//    public ExperimentCatalog getAiravataExperimentCatalog() {
-//        return airavataExperimentCatalog;
-//    }
-//
-//    public void setAiravataExperimentCatalog(ExperimentCatalog airavataExperimentCatalog) {
-//        this.airavataExperimentCatalog = airavataExperimentCatalog;
-//    }
-//
-//
-//    @Subscribe
-//    public void updateRegistry(JobStatusChangeRequestEvent jobStatus) throws Exception{
-//        /* Here we need to parse the jobStatus message and update
-//                the registry accordingly, for now we are just printing to standard Out
-//                 */
-//        JobState state = jobStatus.getState();
-//        if (state != null) {
-//            try {
-//                String taskID = jobStatus.getJobIdentity().getTaskId();
-//                String jobID = jobStatus.getJobIdentity().getJobId();
-//                String expId = jobStatus.getJobIdentity().getExperimentId();
-//                updateJobStatus(expId,taskID, jobID, state);
-//    			logger.debug("expId - {}: Publishing job status for " + jobStatus.getJobIdentity().getJobId() + ":"
-//                        + state.toString(),jobStatus.getJobIdentity().getExperimentId());
-//                JobStatusChangeEvent event = new JobStatusChangeEvent(jobStatus.getState(), jobStatus.getJobIdentity());
-//                localEventPublisher.publish(event);
-//                String messageId = AiravataUtils.getId("JOB");
-//                MessageContext msgCntxt = new MessageContext(event, MessageType.JOB, messageId, jobStatus.getJobIdentity().getGatewayId());
-//                msgCntxt.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
-//                publisher.publish(msgCntxt);
-//            } catch (Exception e) {
-//                logger.error("expId - " + jobStatus.getJobIdentity().getExperimentId() + ": Error persisting data"
-//                        + e.getLocalizedMessage(), e);
-//                throw new Exception("Error persisting job status..", e);
-//            }
-//        }
-//    }
-//
-//    public  void updateJobStatus(String expId, String taskId, String jobID, JobState state) throws Exception {
-//        logger.info("expId - {}: Updating job status for " + jobID + ":" + state.toString(), expId);
-//        CompositeIdentifier ids = new CompositeIdentifier(taskId, jobID);
-//        JobModel jobModel = (JobModel) airavataExperimentCatalog.get(ExperimentCatalogModelType.JOB_DETAIL, ids);
-//        if (jobModel == null) {
-//            jobModel = new JobModel();
-//        }
-//        JobStatus status = new JobStatus();
-//        if (JobState.CANCELED.equals(jobModel.getJobStatus().getJobState())) {
-//            status.setJobState(jobModel.getJobStatus().getJobState());
-//        } else {
-//            status.setJobState(state);
-//        }
-//        status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
-//        jobModel.setJobStatus(status);
-//        jobModel.setJobId(jobID);
-//        logger.debug("expId - {}: Updated job status for " + jobID + ":" + jobModel.getJobStatus().toString(), expId);
-//        airavataExperimentCatalog.update(ExperimentCatalogModelType.JOB_STATUS, status, ids);
-//    }
-//
-//	@SuppressWarnings("unchecked")
-//	public void setup(Object... configurations) {
-//		for (Object configuration : configurations) {
-//			if (configuration instanceof ExperimentCatalog){
-//				this.airavataExperimentCatalog =(ExperimentCatalog)configuration;
-//			} else if (configuration instanceof LocalEventPublisher){
-//				this.localEventPublisher =(LocalEventPublisher) configuration;
-//			} else if (configuration instanceof Publisher){
-//                this.publisher=(Publisher) configuration;
-//            }
-//		}
-//	}
-//}
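
AiravataJobStatusUpdator and the two updators below all follow the same Guava
EventBus pattern: a method annotated with @Subscribe is registered on a bus and
receives every posted event of its parameter type, persists the status, then
re-publishes it. A minimal self-contained sketch; the event class is a stand-in
for the real Airavata model, not its API:

    import com.google.common.eventbus.EventBus;
    import com.google.common.eventbus.Subscribe;

    public class EventBusSketch {

        // Stand-in for JobStatusChangeRequestEvent.
        static class JobStatusChangeRequest {
            final String jobId, state;
            JobStatusChangeRequest(String jobId, String state) {
                this.jobId = jobId;
                this.state = state;
            }
        }

        static class Updator {
            @Subscribe // invoked by the bus for every posted JobStatusChangeRequest
            public void updateRegistry(JobStatusChangeRequest e) {
                // the deleted class persisted the status here, then
                // re-published a JobStatusChangeEvent to the local bus
                // and the message broker
                System.out.println("persist " + e.jobId + " -> " + e.state);
            }
        }

        public static void main(String[] args) {
            EventBus bus = new EventBus();
            bus.register(new Updator());
            bus.post(new JobStatusChangeRequest("job-1", "ACTIVE"));
        }
    }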

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataProcessStatusUpdator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataProcessStatusUpdator.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataProcessStatusUpdator.java
deleted file mode 100644
index 6f85e6c..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataProcessStatusUpdator.java
+++ /dev/null
@@ -1,125 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.impl;
-//
-//import com.google.common.eventbus.Subscribe;
-//import org.apache.airavata.common.utils.AiravataUtils;
-//import org.apache.airavata.common.utils.LocalEventPublisher;
-//import org.apache.airavata.common.utils.listener.AbstractActivityListener;
-//import org.apache.airavata.messaging.core.MessageContext;
-//import org.apache.airavata.messaging.core.Publisher;
-//import org.apache.airavata.model.messaging.event.MessageType;
-//import org.apache.airavata.model.messaging.event.TaskStatusChangeEvent;
-//import org.apache.airavata.model.messaging.event.WorkflowIdentifier;
-//import org.apache.airavata.model.messaging.event.WorkflowNodeStatusChangeEvent;
-//import org.apache.airavata.model.experiment.WorkflowNodeDetails;
-//import org.apache.airavata.model.experiment.WorkflowNodeState;
-//import org.apache.airavata.model.experiment.WorkflowNodeStatus;
-//import org.apache.airavata.model.status.ProcessState;
-//import org.apache.airavata.model.status.ProcessStatus;
-//import org.apache.airavata.registry.cpi.ExperimentCatalog;
-//import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.util.Calendar;
-//
-//public class AiravataProcessStatusUpdator implements AbstractActivityListener {
-//    private final static Logger logger = LoggerFactory.getLogger(AiravataProcessStatusUpdator.class);
-//
-//    private ExperimentCatalog airavataExperimentCatalog;
-//    private LocalEventPublisher localEventPublisher;
-//    private Publisher publisher;
-//
-//
-//
-//
-//    public ExperimentCatalog getAiravataExperimentCatalog() {
-//        return airavataExperimentCatalog;
-//    }
-//
-//    public void setAiravataExperimentCatalog(ExperimentCatalog airavataExperimentCatalog) {
-//        this.airavataExperimentCatalog = airavataExperimentCatalog;
-//    }
-//
-//    @Subscribe
-//    public void setupProcessStatus(TaskStatusChangeEvent taskStatus) throws Exception{
-//        ProcessState state;
-//    	switch(taskStatus.getState()){
-//    	case CANCELED:
-//    		state=ProcessState.CANCELED; break;
-//    	case COMPLETED:
-//    		state=ProcessState.EXECUTING; break;
-//    	case FAILED:
-//    		state=ProcessState.FAILED; break;
-//    	case EXECUTING:
-//    		state=ProcessState.EXECUTING; break;
-//		default:
-//			return;
-//    	}
-//    	try {
-//            String expId = taskStatus.getTaskIdentity().getExperimentId();
-//			updateWorkflowNodeStatus(expId, taskStatus.getTaskIdentity().getWorkflowNodeId(), state);
-//            logger.debug("expId - {}: Publishing workflow node status for " + taskStatus.getTaskIdentity().getWorkflowNodeId()
-//                    + ":" + state.toString(), taskStatus.getTaskIdentity().getExperimentId());
-//            WorkflowIdentifier workflowIdentity = new WorkflowIdentifier(taskStatus.getTaskIdentity().getWorkflowNodeId(),
-//                                                                         taskStatus.getTaskIdentity().getExperimentId(),
-//                                                                         taskStatus.getTaskIdentity().getGatewayId());
-//            WorkflowNodeStatusChangeEvent event = new WorkflowNodeStatusChangeEvent(state, workflowIdentity);
-//            localEventPublisher.publish(event);
-//            String messageId = AiravataUtils.getId("WFNODE");
-//            MessageContext msgCntxt = new MessageContext(event, MessageType.WORKFLOWNODE, messageId, taskStatus.getTaskIdentity().getGatewayId());
-//            msgCntxt.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
-//
-//            publisher.publish(msgCntxt);
-//		} catch (Exception e) {
-//            logger.error("expId - " + taskStatus.getTaskIdentity().getExperimentId() + ": Error persisting data"
-//                    + e.getLocalizedMessage(), e);
-//            throw new Exception("Error persisting workflow node status..", e);
-//        }
-//    }
-//
-//    public  void updateWorkflowNodeStatus(String experimentId, String workflowNodeId, WorkflowNodeState state) throws Exception {
-//		logger.info("expId - {}: Updating workflow node status for "+workflowNodeId+":"+state.toString(), experimentId);
-//    	WorkflowNodeDetails details = (WorkflowNodeDetails) airavataExperimentCatalog.get(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL, workflowNodeId);
-//        if(details == null) {
-//            details = new WorkflowNodeDetails();
-//            details.setNodeInstanceId(workflowNodeId);
-//        }
-//        WorkflowNodeStatus status = new WorkflowNodeStatus();
-//        status.setWorkflowNodeState(state);
-//        status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
-//        details.setWorkflowNodeStatus(status);
-//        airavataExperimentCatalog.update(ExperimentCatalogModelType.WORKFLOW_NODE_STATUS, status, workflowNodeId);
-//    }
-//
-//	public void setup(Object... configurations) {
-//		for (Object configuration : configurations) {
-//			if (configuration instanceof ExperimentCatalog){
-//				this.airavataExperimentCatalog =(ExperimentCatalog)configuration;
-//			} else if (configuration instanceof LocalEventPublisher){
-//				this.localEventPublisher =(LocalEventPublisher) configuration;
-//			}  else if (configuration instanceof Publisher){
-//                this.publisher=(Publisher) configuration;
-//            }
-//        }
-//	}
-//}
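
The setupProcessStatus(...) switch above collapses task-level states onto
process-level states and returns early for anything unmapped. A simplified
sketch of that mapping; the enums are stand-ins for the Airavata models, and
the COMPLETED -> EXECUTING arm reproduces the deleted code as written:

    public class StateMapping {

        enum TaskState { CANCELED, COMPLETED, FAILED, EXECUTING, CREATED }
        enum ProcessState { CANCELED, EXECUTING, FAILED }

        // Returns null for states the listener deliberately skips
        // (the deleted code returned early in the default case).
        static ProcessState toProcessState(TaskState task) {
            switch (task) {
                case CANCELED:  return ProcessState.CANCELED;
                case COMPLETED: return ProcessState.EXECUTING; // sic, as in the deleted code
                case FAILED:    return ProcessState.FAILED;
                case EXECUTING: return ProcessState.EXECUTING;
                default:        return null;
            }
        }

        public static void main(String[] args) {
            System.out.println(toProcessState(TaskState.COMPLETED)); // EXECUTING
            System.out.println(toProcessState(TaskState.CREATED));   // null (skipped)
        }
    }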

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataTaskStatusUpdator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataTaskStatusUpdator.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataTaskStatusUpdator.java
deleted file mode 100644
index 8ddacc4..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/AiravataTaskStatusUpdator.java
+++ /dev/null
@@ -1,159 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.impl;
-//
-//import com.google.common.eventbus.Subscribe;
-//import org.apache.airavata.common.exception.AiravataException;
-//import org.apache.airavata.common.utils.AiravataUtils;
-//import org.apache.airavata.common.utils.LocalEventPublisher;
-//import org.apache.airavata.common.utils.listener.AbstractActivityListener;
-//import org.apache.airavata.messaging.core.MessageContext;
-//import org.apache.airavata.messaging.core.Publisher;
-//import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-//import org.apache.airavata.model.messaging.event.MessageType;
-//import org.apache.airavata.model.messaging.event.TaskIdentifier;
-//import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
-//import org.apache.airavata.model.messaging.event.TaskStatusChangeEvent;
-//import org.apache.airavata.model.messaging.event.TaskStatusChangeRequestEvent;
-//import org.apache.airavata.model.status.TaskState;
-//import org.apache.airavata.registry.cpi.ExperimentCatalog;
-//import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.util.Calendar;
-//
-//public class AiravataTaskStatusUpdator implements AbstractActivityListener {
-//    private final static Logger logger = LoggerFactory.getLogger(AiravataTaskStatusUpdator.class);
-//    private ExperimentCatalog airavataExperimentCatalog;
-//    private LocalEventPublisher localEventPublisher;
-//    private Publisher publisher;
-//
-//    public ExperimentCatalog getAiravataExperimentCatalog() {
-//        return airavataExperimentCatalog;
-//    }
-//
-//    public void setAiravataExperimentCatalog(ExperimentCatalog airavataExperimentCatalog) {
-//        this.airavataExperimentCatalog = airavataExperimentCatalog;
-//    }
-//
-//    @Subscribe
-//    public void setupTaskStatus(TaskStatusChangeRequestEvent taskStatus) throws Exception{
-//    	try {
-//			updateTaskStatus(taskStatus.getTaskIdentity().getTaskId(), taskStatus.getState());
-//            logger.debug("expId - {}: Publishing task status for " + taskStatus.getTaskIdentity().getTaskId() + ":"
-//                    + taskStatus.getState().toString(), taskStatus.getTaskIdentity().getExperimentId());
-//            TaskStatusChangeEvent event = new TaskStatusChangeEvent(taskStatus.getState(), taskStatus.getTaskIdentity());
-//            localEventPublisher.publish(event);
-//            String messageId = AiravataUtils.getId("TASK");
-//            MessageContext msgCntxt = new MessageContext(event, MessageType.TASK, messageId, taskStatus.getTaskIdentity().getGatewayId());
-//            msgCntxt.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
-//            publisher.publish(msgCntxt);
-//		} catch (Exception e) {
-//            String msg = "Error persisting data task status to database...";
-//            logger.error(msg + e.getLocalizedMessage(), e);
-//            throw new Exception(msg, e);
-//		}
-//    }
-//
-//    @Subscribe
-//    public void setupTaskStatus(JobStatusChangeEvent jobStatus) throws Exception{
-//    	TaskState state;
-//    	switch(jobStatus.getState()){
-//    	case ACTIVE:
-//    		state=TaskState.EXECUTING; break;
-//    	case CANCELED:
-//    		state=TaskState.CANCELED; break;
-//    	case COMPLETE: case FAILED:
-//    		state=TaskState.EXECUTING; break;
-//    	case SUSPENDED: case QUEUED:
-//    		state=TaskState.EXECUTING; break;
-//    	case SUBMITTED:
-//    		state=TaskState.EXECUTING; break;
-//		default:
-//			return;
-//    	}
-//    	try {
-//			updateTaskStatus(jobStatus.getJobIdentity().getTaskId(), state);
-//            logger.debug("expId - {}: Publishing task status for " + jobStatus.getJobIdentity().getTaskId() + ":"
-//                    + state.toString(), jobStatus.getJobIdentity().getExperimentId());
-//            TaskIdentifier taskIdentity = new TaskIdentifier(jobStatus.getJobIdentity().getTaskId(),
-//                                                         jobStatus.getJobIdentity().getWorkflowNodeId(),
-//                                                         jobStatus.getJobIdentity().getExperimentId(),
-//                                                         jobStatus.getJobIdentity().getGatewayId());
-//            TaskStatusChangeEvent event = new TaskStatusChangeEvent(state, taskIdentity);
-//            localEventPublisher.publish(event);
-//            String messageId = AiravataUtils.getId("TASK");
-//            MessageContext msgCntxt = new MessageContext(event, MessageType.TASK, messageId,jobStatus.getJobIdentity().getGatewayId());
-//            msgCntxt.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
-//            publisher.publish(msgCntxt);
-//
-//        }  catch (Exception e) {
-//            logger.error("expId - " + jobStatus.getJobIdentity().getExperimentId() + ": Error persisting data" + e.getLocalizedMessage(), e);
-//            throw new Exception("Error persisting task status..", e);
-//		}
-//    }
-//
-//    public  TaskState updateTaskStatus(String taskId, TaskState state) throws Exception {
-//    	TaskDetails details = (TaskDetails) airavataExperimentCatalog.get(ExperimentCatalogModelType.TASK_DETAIL, taskId);
-//        if(details == null) {
-//            logger.error("Task details cannot be null at this point");
-//            throw new Exception("Task details cannot be null at this point");
-//        }
-//        org.apache.airavata.model.workspace.experiment.TaskStatus status = new org.apache.airavata.model.workspace.experiment.TaskStatus();
-//        if(!TaskState.CANCELED.equals(details.getTaskStatus().getExecutionState())
-//                && !TaskState.CANCELING.equals(details.getTaskStatus().getExecutionState())){
-//            status.setExecutionState(state);
-//        }else{
-//            status.setExecutionState(details.getTaskStatus().getExecutionState());
-//        }
-//        status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
-//        details.setTaskStatus(status);
-//        logger.debug("Updating task status for "+taskId+":"+details.getTaskStatus().toString());
-//
-//        airavataExperimentCatalog.update(ExperimentCatalogModelType.TASK_STATUS, status, taskId);
-//        return status.getExecutionState();
-//    }
-//
-//	public void setup(Object... configurations) {
-//		for (Object configuration : configurations) {
-//			if (configuration instanceof ExperimentCatalog){
-//				this.airavataExperimentCatalog =(ExperimentCatalog)configuration;
-//			} else if (configuration instanceof LocalEventPublisher){
-//				this.localEventPublisher =(LocalEventPublisher) configuration;
-//			} else if (configuration instanceof Publisher){
-//                this.publisher=(Publisher) configuration;
-//            }
-//        }
-//	}
-//
-//
-//    @Subscribe
-//    public void taskOutputChanged(TaskOutputChangeEvent taskOutputEvent) throws AiravataException {
-//        String taskId = taskOutputEvent.getTaskIdentity().getTaskId();
-//        logger.debug("Task Output changed event received for workflow node : " +
-//                taskOutputEvent.getTaskIdentity().getWorkflowNodeId() + ", task : " + taskId);
-//        // TODO - do we need to update the output to the registry? , we do it in the workflowInterpreter too.
-//        MessageContext messageContext = new MessageContext(taskOutputEvent, MessageType.TASKOUTPUT, taskOutputEvent.getTaskIdentity().getTaskId(), taskOutputEvent.getTaskIdentity().getGatewayId());
-//        messageContext.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
-//        publisher.publish(messageContext);
-//    }
-//}
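
All three deleted updators wire their collaborators through the same
setup(Object...) idiom: dependencies arrive as an untyped varargs array and are
claimed by instanceof checks, with unknown objects silently ignored. A minimal
sketch with stand-in interfaces:

    public class SetupSketch {

        interface ExperimentCatalog { }
        interface Publisher { }

        static class Updator {
            ExperimentCatalog catalog;
            Publisher publisher;

            public void setup(Object... configurations) {
                for (Object configuration : configurations) {
                    if (configuration instanceof ExperimentCatalog) {
                        this.catalog = (ExperimentCatalog) configuration;
                    } else if (configuration instanceof Publisher) {
                        this.publisher = (Publisher) configuration;
                    } // anything else is silently ignored, as in the deleted code
                }
            }
        }

        public static void main(String[] args) {
            Updator u = new Updator();
            u.setup(new ExperimentCatalog() { }, new Publisher() { });
            System.out.println(u.catalog != null && u.publisher != null); // true
        }
    }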

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
deleted file mode 100644
index 587bf46..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
+++ /dev/null
@@ -1,229 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.ssh.handler;
-//
-//import org.apache.airavata.gfac.core.GFacException;
-//import org.apache.airavata.gfac.core.SSHApiException;
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.context.MessageContext;
-//import org.apache.airavata.gfac.core.handler.AbstractHandler;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
-//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-//import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-//import org.apache.airavata.model.appcatalog.appinterface.DataType;
-//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-//import org.apache.airavata.model.experiment.*;
-//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.io.File;
-//import java.io.PrintWriter;
-//import java.io.StringWriter;
-//import java.net.MalformedURLException;
-//import java.net.URL;
-//import java.util.*;
-//
-///**
-// * This handler will copy input data from gateway machine to airavata
-// * installed machine, later running handlers can copy the input files to computing resource
-// * <Handler class="AdvancedSCPOutputHandler">
-// * <property name="privateKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa"/>
-// * <property name="publicKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa.pub"/>
-// * <property name="userName" value="airavata"/>
-// * <property name="hostName" value="gw98.iu.xsede.org"/>
-// * <property name="inputPath" value="/home/airavata/outputData"/>
-// */
-//public class AdvancedSCPInputHandler extends AbstractHandler {
-//    private static final Logger log = LoggerFactory.getLogger(AdvancedSCPInputHandler.class);
-//    public static final String ADVANCED_SSH_AUTH = "advanced.ssh.auth";
-//    public static final int DEFAULT_SSH_PORT = 22;
-//
-//    private String password = null;
-//
-//    private String publicKeyPath;
-//
-//    private String passPhrase;
-//
-//    private String privateKeyPath;
-//
-//    private String userName;
-//
-//    private String hostName;
-//
-//    private String inputPath;
-//
-//    public void initProperties(Properties properties) throws GFacHandlerException {
-//        password = (String) properties.get("password");
-//        passPhrase = (String) properties.get("passPhrase");
-//        privateKeyPath = (String) properties.get("privateKeyPath");
-//        publicKeyPath = (String) properties.get("publicKeyPath");
-//        userName = (String) properties.get("userName");
-//        hostName = (String) properties.get("hostName");
-//        inputPath = (String) properties.get("inputPath");
-//    }
-//
-//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        super.invoke(jobExecutionContext);
-//        int index = 0;
-//        int oldIndex = 0;
-//        List<String> oldFiles = new ArrayList<String>();
-//        MessageContext inputNew = new MessageContext();
-//        StringBuffer data = new StringBuffer("|");
-//        RemoteCluster remoteCluster = null;
-//
-//        try {
-//            String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
-//            if (pluginData != null) {
-//                try {
-//                    oldIndex = Integer.parseInt(pluginData.split("\\|")[0].trim());
-//                    oldFiles = Arrays.asList(pluginData.split("\\|")[1].split(","));
-//                    if (oldIndex == oldFiles.size()) {
-//                        log.info("Old data looks good !!!!");
-//                    } else {
-//                        oldIndex = 0;
-//                        oldFiles.clear();
-//                    }
-//                } catch (NumberFormatException e) {
-//                    log.error("Previously stored data " + pluginData + " is wrong so we continue the operations");
-//                }
-//            }
-//
-//            AuthenticationInfo authenticationInfo = null;
-//            if (password != null) {
-//                authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
-//            } else {
-//                authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
-//                        this.passPhrase);
-//            }
-//
-//            // Server info
-//            String parentPath = inputPath + File.separator + jobExecutionContext.getExperimentID() + File.separator + jobExecutionContext.getTaskData().getTaskID();
-//            if (index < oldIndex) {
-//                parentPath = oldFiles.get(index);
-//                data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
-//            } else {
-//                (new File(parentPath)).mkdirs();
-//                StringBuffer temp = new StringBuffer(data.append(parentPath).append(",").toString());
-//                GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-//            }
-//            DataTransferDetails detail = new DataTransferDetails();
-//            TransferStatus status = new TransferStatus();
-//            // here doesn't matter what the job manager is because we are only doing some file handling
-//            // not really dealing with monitoring or job submission, so we pa
-//
-//            MessageContext input = jobExecutionContext.getInMessageContext();
-//            Set<String> parameters = input.getParameters().keySet();
-//            for (String paramName : parameters) {
-//                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
-//                String paramValue = inputParamType.getValue();
-//                // TODO: Review this with type
-//                if (inputParamType.getType() == DataType.URI) {
-//                    try {
-//                        URL file = new URL(paramValue);
-//                        String key = file.getUserInfo() + file.getHost() + DEFAULT_SSH_PORT;
-//                        GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, file.getUserInfo(), file.getHost(), DEFAULT_SSH_PORT);
-//                        remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
-//                        paramValue = file.getPath();
-//                    } catch (MalformedURLException e) {
-//                        String key = this.userName + this.hostName + DEFAULT_SSH_PORT;
-//                        GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
-//                        remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
-//                        log.error(e.getLocalizedMessage(), e);
-//                    }
-//
-//                    if (index < oldIndex) {
-//                        log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-//                        inputParamType.setValue(oldFiles.get(index));
-//                        data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
-//                    } else {
-//                        String stageInputFile = stageInputFiles(remoteCluster, paramValue, parentPath);
-//                        inputParamType.setValue(stageInputFile);
-//                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
-//                        status.setTransferState(TransferState.UPLOAD);
-//                        detail.setTransferStatus(status);
-//                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
-//                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//
-//                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-//                    }
-//                }
-//                // FIXME: what is the thrift model DataType equivalent for URIArray type?
-////                else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-////                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
-////                    List<String> newFiles = new ArrayList<String>();
-////                    for (String paramValueEach : split) {
-////                        try {
-////                            URL file = new URL(paramValue);
-////                            this.userName = file.getUserInfo();
-////                            this.hostName = file.getHost();
-////                            paramValueEach = file.getPath();
-////                        } catch (MalformedURLException e) {
-////                            log.error(e.getLocalizedMessage(), e);
-////                        }
-////                        if (index < oldIndex) {
-////                            log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-////                            newFiles.add(oldFiles.get(index));
-////                            data.append(oldFiles.get(index++)).append(",");
-////                        } else {
-////                            String stageInputFiles = stageInputFiles(remoteCluster, paramValueEach, parentPath);
-////                            StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
-////                            GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-////                            newFiles.add(stageInputFiles);
-////                        }
-////                    }
-////                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
-////                }
-//                inputNew.getParameters().put(paramName, inputParamType);
-//            }
-//        } catch (Exception e) {
-//            log.error(e.getMessage());
-//            try {
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//            } catch (GFacException e1) {
-//                log.error(e1.getLocalizedMessage());
-//            }
-//            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-//        }
-//        jobExecutionContext.setInMessageContext(inputNew);
-//    }
-//
-//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        this.invoke(jobExecutionContext);
-//    }
-//
-//    private String stageInputFiles(RemoteCluster remoteCluster, String paramValue, String parentPath) throws GFacException {
-//        try {
-//            remoteCluster.scpFrom(paramValue, parentPath);
-//            return "file://" + parentPath + File.separator + (new File(paramValue)).getName();
-//        } catch (SSHApiException e) {
-//            log.error("Error tranfering remote file to local file, remote path: " + paramValue);
-//            throw new GFacException(e);
-//        }
-//    }
-//}
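
AdvancedSCPInputHandler routes each URI-typed input by attempting to parse the
value as a java.net.URL and falls back to the configured user/host when a plain
path throws MalformedURLException. A small sketch of that routing decision with
hypothetical endpoints:

    import java.net.MalformedURLException;
    import java.net.URL;

    public class InputRouting {

        public static void main(String[] args) {
            String[] params = { "ftp://airavata@gw98.iu.xsede.org/data/in.dat",
                                "/local/in.dat" };
            String defaultUser = "airavata", defaultHost = "localhost";
            for (String paramValue : params) {
                try {
                    // well-formed URL: take user, host and path from it
                    URL file = new URL(paramValue);
                    System.out.println("scp as " + file.getUserInfo() + "@"
                            + file.getHost() + ", path " + file.getPath());
                } catch (MalformedURLException e) {
                    // plain path: fall back to the handler's configured endpoint
                    System.out.println("scp as " + defaultUser + "@" + defaultHost
                            + ", path " + paramValue);
                }
            }
        }
    }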

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
deleted file mode 100644
index 175351f..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ /dev/null
@@ -1,225 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.ssh.handler;
-//
-//import org.apache.airavata.common.exception.ApplicationSettingsException;
-//import org.apache.airavata.gfac.core.GFacException;
-//import org.apache.airavata.gfac.core.SSHApiException;
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.handler.AbstractHandler;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-//import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
-//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-//import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-//import org.apache.airavata.model.appcatalog.appinterface.DataType;
-//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-//import org.apache.airavata.model.experiment.CorrectiveAction;
-//import org.apache.airavata.model.experiment.ErrorCategory;
-//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.io.File;
-//import java.io.PrintWriter;
-//import java.io.StringWriter;
-//import java.net.MalformedURLException;
-//import java.net.URL;
-//import java.util.*;
-//
-///**
-// * This handler will copy outputs from airavata installed local directory
-// * to a remote location, prior to this handler SCPOutputHandler should be invoked
-// * Should add following configuration to gfac-config.xml and configure the keys properly
-// * <Handler class="AdvancedSCPOutputHandler">
-//                            <property name="privateKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa"/>
-//                            <property name="publicKeyPath" value="/Users/lahirugunathilake/.ssh/id_dsa.pub"/>
-//                        <property name="userName" value="airavata"/>
-//                        <property name="hostName" value="gw98.iu.xsede.org"/>
-//                        <property name="outputPath" value="/home/airavata/outputData"/>
-//                        <property name="passPhrase" value="/home/airavata/outputData"/>
-//                        <property name="password" value="/home/airavata/outputData"/>
-//
-// */
-//public class AdvancedSCPOutputHandler extends AbstractHandler {
-//    private static final Logger log = LoggerFactory.getLogger(AdvancedSCPOutputHandler.class);
-//
-//    public static final int DEFAULT_SSH_PORT = 22;
-//
-//    private String password = null;
-//
-//    private String publicKeyPath;
-//
-//    private String passPhrase;
-//
-//    private String privateKeyPath;
-//
-//    private String userName;
-//
-//    private String hostName;
-//
-//    private String outputPath;
-//
-//
-//    public void initProperties(Properties properties) throws GFacHandlerException {
-//        password = (String)properties.get("password");
-//        passPhrase = (String)properties.get("passPhrase");
-//        privateKeyPath = (String)properties.get("privateKeyPath");
-//        publicKeyPath = (String)properties.get("publicKeyPath");
-//        userName = (String)properties.get("userName");
-//        hostName = (String)properties.get("hostName");
-//        outputPath = (String)properties.get("outputPath");
-//    }
-//
-//    @Override
-//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//    	RemoteCluster remoteCluster = null;
-//        AuthenticationInfo authenticationInfo = null;
-//        if (password != null) {
-//            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
-//        } else {
-//            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
-//                    this.passPhrase);
-//        }
-//        try {
-//            String hostName = jobExecutionContext.getHostName();
-//            if (jobExecutionContext.getSecurityContext(hostName) == null) {
-//                try {
-//                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
-//                } catch (ApplicationSettingsException e) {
-//                    log.error(e.getMessage());
-//                    try {
-//                        StringWriter errors = new StringWriter();
-//                        e.printStackTrace(new PrintWriter(errors));
-//         				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//         			} catch (GFacException e1) {
-//         				 log.error(e1.getLocalizedMessage());
-//         			}
-//                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-//                }
-//            }
-//            String standardError = jobExecutionContext.getStandardError();
-//            String standardOutput = jobExecutionContext.getStandardOutput();
-//            super.invoke(jobExecutionContext);
-//            // Server info
-//            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir() != null){
-//                try{
-//                    URL outputPathURL = new URL(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir());
-//                    this.userName = outputPathURL.getUserInfo();
-//                    this.hostName = outputPathURL.getHost();
-//                    outputPath = outputPathURL.getPath();
-//                } catch (MalformedURLException e) {
-//                    log.error(e.getLocalizedMessage(),e);
-//                }
-//            }
-//            String key = GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
-//            remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
-//            if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && !jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().isPersistOutputData()){
-//            outputPath = outputPath + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID()
-//                    + File.separator;
-//                remoteCluster.makeDirectory(outputPath);
-//            }
-//            remoteCluster.scpTo(outputPath, standardError);
-//            remoteCluster.scpTo(outputPath, standardOutput);
-//            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-//            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-//            Set<String> keys = output.keySet();
-//            for (String paramName : keys) {
-//                OutputDataObjectType outputDataObjectType = (OutputDataObjectType) output.get(paramName);
-//                if (outputDataObjectType.getType() == DataType.URI) {
-//                    // for failed jobs outputs are not generated. So we should not download outputs
-//                    if (GFacUtils.isFailedJob(jobExecutionContext)){
-//                        continue;
-//                    }
-//                	String downloadFile = outputDataObjectType.getValue();
-//                    if(downloadFile == null || !(new File(downloadFile).isFile())){
-//                        GFacUtils.saveErrorDetails(jobExecutionContext, "Empty Output returned from the application", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//                		throw new GFacHandlerException("Empty Output returned from the application.");
-//                	}
-//                	remoteCluster.scpTo(outputPath, downloadFile);
-//                    String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
-//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-//                    dataObjectType.setName(paramName);
-//                    dataObjectType.setType(DataType.URI);
-//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-//                    outputArray.add(dataObjectType);
-//                }else if (outputDataObjectType.getType() == DataType.STDOUT) {
-//                    remoteCluster.scpTo(outputPath, standardOutput);
-//                    String fileName = standardOutput.substring(standardOutput.lastIndexOf(File.separatorChar)+1, standardOutput.length());
-//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-//                    dataObjectType.setName(paramName);
-//                    dataObjectType.setType(DataType.STDOUT);
-//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-//                    outputArray.add(dataObjectType);
-//                }else if (outputDataObjectType.getType() == DataType.STDERR) {
-//                    remoteCluster.scpTo(outputPath, standardError);
-//                    String fileName = standardError.substring(standardError.lastIndexOf(File.separatorChar)+1, standardError.length());
-//                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
-//                    dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-//                    dataObjectType.setName(paramName);
-//                    dataObjectType.setType(DataType.STDERR);
-//                    dataObjectType.setIsRequired(outputDataObjectType.isIsRequired());
-//                    dataObjectType.setRequiredToAddedToCommandLine(outputDataObjectType.isRequiredToAddedToCommandLine());
-//                    dataObjectType.setApplicationArgument(outputDataObjectType.getApplicationArgument());
-//                    dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
-//                    outputArray.add(dataObjectType);
-//                }
-//             }
-//           experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-//        } catch (SSHApiException e) {
-//            try {
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//            } catch (GFacException e1) {
-//                log.error(e1.getLocalizedMessage());
-//            }
-//            log.error("Error transfering files to remote host : " + hostName + " with the user: " + userName);
-//            log.error(e.getMessage());
-//            throw new GFacHandlerException(e);
-//        } catch (Exception e) {
-//            try {
-//                GFacUtils.saveErrorDetails(jobExecutionContext, e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//            } catch (GFacException e1) {
-//                log.error(e1.getLocalizedMessage());
-//            }
-//            throw new GFacHandlerException(e);
-//        }
-//    }
-//
-//    @Override
-//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        // TODO: Auto generated method body.
-//    }
-//
-//
-//}
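
The staging loop deleted above follows a single pattern throughout: copy each produced file (URI outputs plus stdout/stderr) into an experiment-specific output directory, then record the new location under the parameter name. Below is a minimal, JDK-only sketch of that pattern; Files.copy stands in for RemoteCluster.scpTo, and every class and path name is a hypothetical illustration, not an Airavata API.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.LinkedHashMap;
import java.util.Map;

// Sketch of the deleted staging loop: copy every produced file into an
// experiment-specific output directory and record its new location under
// the parameter name. Files.copy stands in for RemoteCluster.scpTo.
public class OutputStagingSketch {

    public static Map<String, Path> stageOutputs(Map<String, Path> produced, Path outputDir)
            throws IOException {
        Files.createDirectories(outputDir);
        Map<String, Path> staged = new LinkedHashMap<>();
        for (Map.Entry<String, Path> entry : produced.entrySet()) {
            Path source = entry.getValue();
            // Mirrors the deleted "Empty Output returned from the application" guard.
            if (source == null || !Files.isRegularFile(source)) {
                throw new IOException("Missing output for parameter: " + entry.getKey());
            }
            Path target = outputDir.resolve(source.getFileName());
            Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
            staged.put(entry.getKey(), target);
        }
        return staged;
    }
}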

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
deleted file mode 100644
index 5dc9f2a..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
+++ /dev/null
@@ -1,78 +0,0 @@
-//package org.apache.airavata.gfac.ssh.handler;
-//
-//import java.io.PrintWriter;
-//import java.io.StringWriter;
-//import java.util.List;
-//import java.util.Properties;
-//
-//import org.apache.airavata.gfac.core.GFacException;
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.handler.AbstractHandler;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.provider.GFacProviderException;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-//import org.apache.airavata.gfac.ssh.util.HandleOutputs;
-//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-//import org.apache.airavata.model.experiment.CorrectiveAction;
-//import org.apache.airavata.model.experiment.ErrorCategory;
-//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-//import org.apache.airavata.registry.cpi.RegistryException;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//public class NewSSHOutputHandler extends AbstractHandler{
-//
-//	 private static final Logger log = LoggerFactory.getLogger(NewSSHOutputHandler.class);
-//
-//	    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//	        String hostAddress = jobExecutionContext.getHostName();
-//	      	RemoteCluster remoteCluster = null;
-//	      	// Security Context and connection
-//	        try {
-//	            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-//	                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-//	            }
-//	            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-//	            if (remoteCluster == null) {
-//	                throw new GFacProviderException("Security context is not set properly");
-//	            } else {
-//	                log.info("Successfully retrieved the Security Context");
-//	            }
-//	        } catch (Exception e) {
-//	            log.error(e.getMessage());
-//	            try {
-//                    StringWriter errors = new StringWriter();
-//                    e.printStackTrace(new PrintWriter(errors));
-//	                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//	            } catch (GFacException e1) {
-//	                log.error(e1.getLocalizedMessage());
-//	            }
-//	            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-//	        }
-//
-//	        super.invoke(jobExecutionContext);
-//	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, remoteCluster);
-//	        try {
-//				experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-//			} catch (RegistryException e) {
-//				throw new GFacHandlerException(e);
-//			}
-//
-//
-//	    }
-//
-//    @Override
-//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        // TODO: Auto generated method body.
-//    }
-//
-//    @Override
-//	public void initProperties(Properties properties) throws GFacHandlerException {
-//		// TODO Auto-generated method stub
-//
-//	}
-//
-//}
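
NewSSHOutputHandler, like the other deleted handlers, lazily created the per-host security context on first use and failed fast when it still came back unusable. A sketch of that create-on-first-use check as a standalone helper; Context and the factory function are hypothetical stand-ins assuming nothing beyond the JDK, not Airavata types.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Sketch of the create-on-first-use security-context check: build the
// per-host context if absent, then fail fast if it is still unusable.
public class ContextCacheSketch {

    interface Context {
        boolean isUsable();
    }

    private final Map<String, Context> contexts = new ConcurrentHashMap<>();

    public Context contextFor(String host, Function<String, Context> factory) {
        Context context = contexts.computeIfAbsent(host, factory);
        if (context == null || !context.isUsable()) {
            throw new IllegalStateException("Security context is not set properly for " + host);
        }
        return context;
    }
}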

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
deleted file mode 100644
index d8afb06..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
+++ /dev/null
@@ -1,119 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.ssh.handler;
-//
-//import org.apache.airavata.gfac.core.GFacException;
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.handler.AbstractHandler;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-//import org.apache.airavata.model.experiment.*;
-//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.io.PrintWriter;
-//import java.io.StringWriter;
-//import java.util.Properties;
-//
-//public class SSHDirectorySetupHandler extends AbstractHandler {
-//    private static final Logger log = LoggerFactory.getLogger(SSHDirectorySetupHandler.class);
-//
-//	public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        try {
-//            String hostAddress = jobExecutionContext.getHostName();
-//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-//                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-//            }
-//        } catch (Exception e) {
-//            log.error(e.getMessage());
-//            try {
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//            } catch (GFacException e1) {
-//                log.error(e1.getLocalizedMessage());
-//            }
-//            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-//        }
-//
-//        log.info("Setup SSH job directorties");
-//        super.invoke(jobExecutionContext);
-//        makeDirectory(jobExecutionContext);
-//
-//	}
-//
-//    @Override
-//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        // TODO: Auto generated method body.
-//    }
-//
-//    private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//		RemoteCluster remoteCluster = null;
-//		try{
-//            String hostAddress = jobExecutionContext.getHostName();
-//            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-//            if (remoteCluster == null) {
-//                throw new GFacHandlerException("Security context is not set properly");
-//            } else {
-//                log.info("Successfully retrieved the Security Context");
-//            }
-//            String workingDirectory = jobExecutionContext.getWorkingDir();
-//            remoteCluster.makeDirectory(workingDirectory);
-//            if(!jobExecutionContext.getInputDir().equals(workingDirectory))
-//            	remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
-//            if(!jobExecutionContext.getOutputDir().equals(workingDirectory))
-//            	remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
-//
-//            DataTransferDetails detail = new DataTransferDetails();
-//            TransferStatus status = new TransferStatus();
-//            status.setTransferState(TransferState.DIRECTORY_SETUP);
-//            detail.setTransferStatus(status);
-//            detail.setTransferDescription("Working directory = " + workingDirectory);
-//
-//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//
-//        } catch (Exception e) {
-//			DataTransferDetails detail = new DataTransferDetails();
-//            TransferStatus status = new TransferStatus();
-//            status.setTransferState(TransferState.FAILED);
-//            detail.setTransferStatus(status);
-//            detail.setTransferDescription("Working directory = " + jobExecutionContext.getWorkingDir());
-//            try {
-//                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-//            } catch (Exception e1) {
-//                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-//            }
-//            throw new GFacHandlerException("Error executing the Handler: " + SSHDirectorySetupHandler.class, e);
-//        }
-//
-//	}
-//
-//    public void initProperties(Properties properties) throws GFacHandlerException {
-//
-//    }
-//}
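
SSHDirectorySetupHandler created the working directory, then the input and output directories only when they differed from it. The same de-duplication falls out naturally from a set, as in this JDK-only sketch, with a local Files.createDirectories standing in for RemoteCluster.makeDirectory.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashSet;
import java.util.Set;

// Sketch of the deleted directory setup: create the working, input and
// output directories, touching each distinct path only once. The set gives
// the same effect as the deleted equals() checks against the working dir.
public class DirectorySetupSketch {

    public static void setUp(Path workingDir, Path inputDir, Path outputDir) throws IOException {
        Set<Path> distinct = new LinkedHashSet<>();
        distinct.add(workingDir.normalize());
        distinct.add(inputDir.normalize());
        distinct.add(outputDir.normalize());
        for (Path dir : distinct) {
            Files.createDirectories(dir); // no-op when the directory already exists
        }
    }
}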

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
deleted file mode 100644
index b1e485a..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
+++ /dev/null
@@ -1,198 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.ssh.handler;
-//
-//import org.apache.airavata.common.exception.ApplicationSettingsException;
-//import org.apache.airavata.gfac.core.GFacException;
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.context.MessageContext;
-//import org.apache.airavata.gfac.core.handler.AbstractHandler;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-//import org.apache.airavata.model.appcatalog.appinterface.DataType;
-//import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-//import org.apache.airavata.model.experiment.*;
-//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.io.File;
-//import java.io.IOException;
-//import java.io.PrintWriter;
-//import java.io.StringWriter;
-//import java.util.ArrayList;
-//import java.util.List;
-//import java.util.Properties;
-//import java.util.Set;
-//
-//public class SSHInputHandler extends AbstractHandler {
-//
-//    private static final Logger log = LoggerFactory.getLogger(SSHInputHandler.class);
-//
-//
-//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        DataTransferDetails detail = new DataTransferDetails();
-//        detail.setTransferDescription("Input Data Staging");
-//        TransferStatus status = new TransferStatus();
-//        int index = 0;
-//        int oldIndex = 0;
-//        List<String> oldFiles = new ArrayList<String>();
-//        StringBuffer data = new StringBuffer("|");
-//        MessageContext inputNew = new MessageContext();
-//        RemoteCluster remoteCluster = null;
-//
-//        try {
-//            String hostAddress = jobExecutionContext.getHostName();
-//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-//                try {
-//                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
-//                } catch (ApplicationSettingsException e) {
-//                    log.error(e.getMessage());
-//                    try {
-//                        StringWriter errors = new StringWriter();
-//                        e.printStackTrace(new PrintWriter(errors));
-//                        GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//                    } catch (GFacException e1) {
-//                        log.error(e1.getLocalizedMessage());
-//                    }
-//                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-//                }
-//            }
-//
-//            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-//            if (remoteCluster == null) {
-//                throw new GFacException("Security context is not set properly");
-//            } else {
-//                log.info("Successfully retrieved the Security Context");
-//            }
-//            log.info("Invoking SCPInputHandler");
-//            super.invoke(jobExecutionContext);
-//
-//
-//            MessageContext input = jobExecutionContext.getInMessageContext();
-//            Set<String> parameters = input.getParameters().keySet();
-//            for (String paramName : parameters) {
-//                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
-//                String paramValue = inputParamType.getValue();
-//                //TODO: Review this with type
-//                if (inputParamType.getType() == DataType.URI) {
-//                    if (index < oldIndex) {
-//                        log.info("Input File: " + paramValue + " is already transferred, so we skip this operation!");
-//                        inputParamType.setValue(oldFiles.get(index));
-//                        data.append(oldFiles.get(index++)).append(","); // reuse the already transferred file and increment the index
-//                    } else {
-//                        String stageInputFile = stageInputFiles(remoteCluster, jobExecutionContext, paramValue);
-//                        inputParamType.setValue(stageInputFile);
-//                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
-//                        status.setTransferState(TransferState.UPLOAD);
-//                        detail.setTransferStatus(status);
-//                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
-//                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//
-//                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-//                    }
-//                }// FIXME: what is the thrift model DataType equivalent for URIArray type?
-////                else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-////                	if (index < oldIndex) {
-////                        log.info("Input File: " + paramValue + " is already transferred, so we skip this operation!");
-////                        ((URIParameterType) actualParameter.getType()).setValue(oldFiles.get(index));
-////                        data.append(oldFiles.get(index++)).append(","); // reuse the already transferred file and increment the index
-////                    }else{
-////                	List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
-////                    List<String> newFiles = new ArrayList<String>();
-////                    for (String paramValueEach : split) {
-////                        String stageInputFiles = stageInputFiles(remoteCluster,jobExecutionContext, paramValueEach);
-////                        status.setTransferState(TransferState.UPLOAD);
-////                        detail.setTransferStatus(status);
-////                        detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
-////                        registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-////                        newFiles.add(stageInputFiles);
-////                        StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
-////                        GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-////                    }
-////                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
-////                    }
-////                }
-//                inputNew.getParameters().put(paramName, inputParamType);
-//            }
-//        } catch (Exception e) {
-//            log.error(e.getMessage());
-//            status.setTransferState(TransferState.FAILED);
-//            detail.setTransferStatus(status);
-//            try {
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext, errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-//                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//            } catch (Exception e1) {
-//                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-//            }
-//            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-//        }
-//        jobExecutionContext.setInMessageContext(inputNew);
-//    }
-//
-//    @Override
-//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        // TODO: Auto generated method body.
-//    }
-//
-//    private static String stageInputFiles(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
-//        int i = paramValue.lastIndexOf(File.separator);
-//        String substring = paramValue.substring(i + 1);
-//        try {
-//            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
-//            if(paramValue.startsWith("scp:")){
-//            	paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-//            	remoteCluster.scpThirdParty(paramValue, targetFile);
-//            } else {
-//                if (paramValue.startsWith("file")) {
-//                    paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-//                }
-//                boolean success = false;
-//                int j = 1;
-//                while (!success) {
-//                    try {
-//                        remoteCluster.scpTo(targetFile, paramValue);
-//                        success = true;
-//                    } catch (Exception e) {
-//                        log.info(e.getLocalizedMessage());
-//                        Thread.sleep(2000);
-//                        if (j == 3) {
-//                            throw new GFacHandlerException("Error during input file staging", e, e.getLocalizedMessage());
-//                        }
-//                    }
-//                    j++;
-//                }
-//            }
-//            return targetFile;
-//        } catch (Exception e) {
-//            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-//        }
-//    }
-//
-//    public void initProperties(Properties properties) throws GFacHandlerException {
-//
-//    }
-//}
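
The stageInputFiles method deleted above retried a failed scpTo up to three times with a fixed two-second pause, tracked through the success flag and the j counter. Here is a sketch of the same bounded-retry loop as a reusable helper, keeping the deleted code's attempt count and delay; the Callable is a stand-in for the actual transfer call, and everything else is plain JDK.

import java.util.concurrent.Callable;

// Sketch of the deleted bounded-retry loop: attempt the transfer, pause on
// failure, and give up after a fixed number of tries.
public class RetrySketch {

    public static <T> T withRetries(Callable<T> action, int attempts, long delayMillis)
            throws Exception {
        if (attempts < 1) {
            throw new IllegalArgumentException("attempts must be >= 1");
        }
        Exception last = null;
        for (int attempt = 1; attempt <= attempts; attempt++) {
            try {
                return action.call();
            } catch (Exception e) {
                last = e;
                if (attempt < attempts) {
                    Thread.sleep(delayMillis); // fixed pause, as in the deleted loop
                }
            }
        }
        throw last;
    }
}

// Hypothetical usage, matching the deleted stageInputFiles:
//   withRetries(() -> { remoteCluster.scpTo(targetFile, paramValue); return null; }, 3, 2000);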

http://git-wip-us.apache.org/repos/asf/airavata/blob/08cdad26/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
deleted file mode 100644
index 95368f6..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
+++ /dev/null
@@ -1,256 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.gfac.ssh.handler;
-//
-//import org.apache.airavata.common.utils.ServerSettings;
-//import org.apache.airavata.gfac.core.GFacConstants;
-//import org.apache.airavata.gfac.core.GFacException;
-//import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.handler.AbstractHandler;
-//import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-//import org.apache.airavata.gfac.core.provider.GFacProviderException;
-//import org.apache.airavata.gfac.core.GFacUtils;
-//import org.apache.airavata.gfac.impl.OutputUtils;
-//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
-//import org.apache.airavata.model.appcatalog.appinterface.DataType;
-//import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-//import org.apache.airavata.model.experiment.CorrectiveAction;
-//import org.apache.airavata.model.experiment.DataTransferDetails;
-//import org.apache.airavata.model.experiment.ErrorCategory;
-//import org.apache.airavata.model.experiment.TaskDetails;
-//import org.apache.airavata.model.experiment.TransferState;
-//import org.apache.airavata.model.experiment.TransferStatus;
-//import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.io.File;
-//import java.io.PrintWriter;
-//import java.io.StringWriter;
-//import java.util.ArrayList;
-//import java.util.List;
-//import java.util.Map;
-//import java.util.Properties;
-//import java.util.Set;
-//
-//public class SSHOutputHandler extends AbstractHandler {
-//    private static final Logger log = LoggerFactory.getLogger(SSHOutputHandler.class);
-//
-//    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        String hostAddress = jobExecutionContext.getHostName();
-//        try {
-//            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-//                GFACSSHUtils.addSecurityContext(jobExecutionContext);
-//            }
-//        } catch (Exception e) {
-//            log.error(e.getMessage());
-//            try {
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-//            } catch (GFacException e1) {
-//                log.error(e1.getLocalizedMessage());
-//            }
-//            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-//        }
-//
-//        super.invoke(jobExecutionContext);
-//        DataTransferDetails detail = new DataTransferDetails();
-//        detail.setTransferDescription("Output data staging");
-//        TransferStatus status = new TransferStatus();
-//
-//        RemoteCluster remoteCluster = null;
-//        try {
-//             remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-//            if (remoteCluster == null) {
-//                throw new GFacProviderException("Security context is not set properly");
-//            } else {
-//                log.info("Successfully retrieved the Security Context");
-//            }
-//
-//            // Get the Stdouts and StdErrs
-//            String timeStampedExperimentID = GFacUtils.createUniqueNameWithDate(jobExecutionContext.getExperimentID());
-//
-//            TaskDetails taskData = jobExecutionContext.getTaskData();
-//            String outputDataDir = ServerSettings.getSetting(GFacConstants.OUTPUT_DATA_DIR, File.separator + "tmp");
-//            File localStdOutFile;
-//            File localStdErrFile;
-//            //FIXME: AdvancedOutput is remote location and third party transfer should work to make this work
-////            if (taskData.getAdvancedOutputDataHandling() != null) {
-////                outputDataDir = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
-////            }
-//            if (outputDataDir == null) {
-//                outputDataDir = File.separator + "tmp";
-//            }
-//            outputDataDir = outputDataDir + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID();
-//            (new File(outputDataDir)).mkdirs();
-//
-//
-//            localStdOutFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stdout");
-//            localStdErrFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stderr");
-////            remoteCluster.makeDirectory(outputDataDir);
-//            int i = 0;
-//            String stdOutStr = "";
-//            while (stdOutStr.isEmpty()) {
-//                try {
-//                    remoteCluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
-//                    stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
-//                } catch (Exception e) {
-//                    log.error(e.getLocalizedMessage());
-//                    Thread.sleep(2000);
-//                }
-//                i++;
-//                if (i == 3) break;
-//            }
-//            Thread.sleep(1000);
-//            remoteCluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
-//            Thread.sleep(1000);
-//
-//            String stdErrStr = GFacUtils.readFileToString(localStdErrFile.getAbsolutePath());
-//            status.setTransferState(TransferState.STDOUT_DOWNLOAD);
-//            detail.setTransferStatus(status);
-//            detail.setTransferDescription("STDOUT:" + localStdOutFile.getAbsolutePath());
-//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//
-//            status.setTransferState(TransferState.STDERROR_DOWNLOAD);
-//            detail.setTransferStatus(status);
-//            detail.setTransferDescription("STDERR:" + localStdErrFile.getAbsolutePath());
-//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//
-//
-//            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-//            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-//            Set<String> keys = output.keySet();
-//            for (String paramName : keys) {
-//                OutputDataObjectType actualParameter = (OutputDataObjectType) output.get(paramName);
-//                if (DataType.URI == actualParameter.getType()) {
-//                    List<String> outputList = null;
-//                    int retry = 3;
-//                    while (retry > 0) {
-//                        outputList = remoteCluster.listDirectory(jobExecutionContext.getOutputDir());
-//                        if (outputList.size() > 0) {
-//                            break;
-//                        }
-//                        retry--;
-//                        Thread.sleep(2000);
-//                    }
-//
-//                    if (outputList.size() == 0 || outputList.get(0).isEmpty() || outputList.size() > 1) {
-//                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
-//                        Set<String> strings = output.keySet();
-//                        outputArray.clear();
-//                        for (String key : strings) {
-//                            OutputDataObjectType actualParameter1 = (OutputDataObjectType) output.get(key);
-//                            if (DataType.URI == actualParameter1.getType()) {
-//                                String downloadFile = actualParameter1.getValue();
-//                                remoteCluster.scpFrom(downloadFile, outputDataDir);
-//                                String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-//                                String localFile = outputDataDir + File.separator + fileName;
-//                                jobExecutionContext.addOutputFile(localFile);
-//                                actualParameter1.setValue(localFile);
-//                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-//                                dataObjectType.setValue(localFile);
-//                                dataObjectType.setName(key);
-//                                dataObjectType.setType(DataType.URI);
-//                                outputArray.add(dataObjectType);
-//                            }else if (DataType.STDOUT == actualParameter.getType()) {
-//                                String fileName = localStdOutFile.getName();
-//                                String localFile = outputDataDir + File.separator + fileName;
-//                                jobExecutionContext.addOutputFile(localFile);
-//                                actualParameter.setValue(localFile);
-//                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-//                                dataObjectType.setValue(localFile);
-//                                dataObjectType.setName(key);
-//                                dataObjectType.setType(DataType.STDOUT);
-//                                outputArray.add(dataObjectType);
-//                            }else if (DataType.STDERR == actualParameter.getType()) {
-//                                String fileName = localStdErrFile.getName();
-//                                String localFile = outputDataDir + File.separator + fileName;
-//                                jobExecutionContext.addOutputFile(localFile);
-//                                actualParameter.setValue(localFile);
-//                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-//                                dataObjectType.setValue(localFile);
-//                                dataObjectType.setName(key);
-//                                dataObjectType.setType(DataType.STDERR);
-//                                outputArray.add(dataObjectType);
-//                            }
-//                        }
-//                        break;
-//                    } else if (outputList.size() == 1) {//FIXME: Ultrascan case
-//                        String valueList = outputList.get(0);
-//                        remoteCluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
-//                        String outputPath = outputDataDir + File.separator + valueList;
-//                        jobExecutionContext.addOutputFile(outputPath);
-//                        actualParameter.setValue(outputPath);
-//                        OutputDataObjectType dataObjectType = new OutputDataObjectType();
-//                        dataObjectType.setValue(outputPath);
-//                        dataObjectType.setName(paramName);
-//                        dataObjectType.setType(DataType.URI);
-//                        outputArray.add(dataObjectType);
-//                    }
-//                } else {
-//                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
-//                }
-//            }
-//            if (outputArray == null || outputArray.isEmpty()) {
-//                log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
-//                if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null) {
-//                    throw new GFacHandlerException(
-//                            "Empty Output returned from the Application, Double check the application"
-//                                    + "and ApplicationDescriptor output Parameter Names");
-//                }
-//            }
-//            jobExecutionContext.setStandardError(localStdErrFile.getAbsolutePath());
-//            jobExecutionContext.setStandardOutput(localStdOutFile.getAbsolutePath());
-//            jobExecutionContext.setOutputDir(outputDataDir);
-//            status.setTransferState(TransferState.DOWNLOAD);
-//            detail.setTransferStatus(status);
-//            detail.setTransferDescription(outputDataDir);
-//            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//            experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-//
-//        } catch (Exception e) {
-//            try {
-//                status.setTransferState(TransferState.FAILED);
-//                detail.setTransferStatus(status);
-//                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//                StringWriter errors = new StringWriter();
-//                e.printStackTrace(new PrintWriter(errors));
-//                GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-//            } catch (Exception e1) {
-//                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-//            }
-//            throw new GFacHandlerException("Error in retrieving results", e);
-//        }
-//
-//    }
-//
-//    @Override
-//    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-//        // TODO: Auto generated method body.
-//    }
-//
-//    public void initProperties(Properties properties) throws GFacHandlerException {
-//
-//    }
-//}
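
One recurring detail across these deleted handlers: file names are derived with substring(lastIndexOf(File.separatorChar) + 1), which silently returns the whole path whenever the remote separator ('/') differs from the local File.separatorChar (for example, a gateway on Windows staging from a Unix cluster). A small separator-agnostic sketch of the same extraction, using only the JDK:

// Separator-agnostic basename extraction; works whether the remote path
// uses '/' or '\\', unlike lastIndexOf(File.separatorChar).
public class RemotePaths {

    public static String baseName(String remotePath) {
        int cut = Math.max(remotePath.lastIndexOf('/'), remotePath.lastIndexOf('\\'));
        return remotePath.substring(cut + 1);
    }

    public static void main(String[] args) {
        System.out.println(baseName("/home/gateway/out/result.dat")); // prints result.dat
    }
}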