Posted to commits@airavata.apache.org by sh...@apache.org on 2015/06/16 21:37:40 UTC

[1/7] airavata git commit: Removed gsi related code

Repository: airavata
Updated Branches:
  refs/heads/master 922645376 -> d9b2df033


http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/X509Helper.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/X509Helper.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/X509Helper.java
deleted file mode 100644
index 08c3f67..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/X509Helper.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.util;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.bouncycastle.jce.provider.BouncyCastleProvider;
-
-
-import java.io.*;
-import java.security.*;
-import java.security.cert.CertificateException;
-import java.security.cert.CertificateFactory;
-import java.security.cert.CertificateParsingException;
-import java.security.cert.X509Certificate;
-import java.security.spec.InvalidKeySpecException;
-
-public class X509Helper {
-
-    static {
-        // parsing of RSA key fails without this
-        java.security.Security.addProvider(new BouncyCastleProvider());
-    }
-
-
-
-    public static KeyStore keyStoreFromPEM(String proxyFile,
-                                           String keyPassPhrase) throws IOException,
-            CertificateException,
-            NoSuchAlgorithmException,
-            InvalidKeySpecException,
-            KeyStoreException {
-        return keyStoreFromPEM(proxyFile,proxyFile,keyPassPhrase);
-    }
-
-    public static KeyStore keyStoreFromPEM(String certFile,
-                                           String keyFile,
-                                           String keyPassPhrase) throws IOException,
-                                                                        CertificateException,
-                                                                        NoSuchAlgorithmException,
-                                                                        InvalidKeySpecException,
-                                                                        KeyStoreException {
-        CertificateFactory cf = CertificateFactory.getInstance("X.509");
-        X509Certificate cert = (X509Certificate)cf.generateCertificate(new FileInputStream(certFile));
-        //System.out.println(cert.toString());
-
-        // this works for proxy files, too, since it skips over the certificate
-        BufferedReader reader = new BufferedReader(new FileReader(keyFile));
-        String line = null;
-        StringBuilder builder = new StringBuilder();
-        boolean inKey = false;
-        while((line=reader.readLine()) != null) {
-            if (line.contains("-----BEGIN RSA PRIVATE KEY-----")) {
-                inKey = true;
-            }
-            if (inKey) {
-                builder.append(line);
-                builder.append(System.getProperty("line.separator"));
-            }
-            if (line.contains("-----END RSA PRIVATE KEY-----")) {
-                inKey = false;
-            }
-        }
-        String privKeyPEM = builder.toString();
-        //System.out.println(privKeyPEM);
-
-        // using BouncyCastle
-//        PEMReader pemParser = new PEMReader(new StringReader(privKeyPEM));
-//        Object object = pemParser.readObject();
-//
-//        PrivateKey privKey = null;
-//        if(object instanceof KeyPair){
-//            privKey = ((KeyPair)object).getPrivate();
-//        }
-        // PEMParser from BouncyCastle is good for reading PEM files, but I didn't want to add that dependency
-        /*
-        // Base64 decode the data
-        byte[] encoded = javax.xml.bind.DatatypeConverter.parseBase64Binary(privKeyPEM);
-
-        // PKCS8 decode the encoded RSA private key
-        java.security.spec.PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(encoded);
-        KeyFactory kf = KeyFactory.getInstance("RSA");
-        PrivateKey privKey = kf.generatePrivate(keySpec);
-        //RSAPrivateKey privKey = (RSAPrivateKey)kf.generatePrivate(keySpec);
-        */
-        //System.out.println(privKey.toString());
-
-//        KeyStore keyStore = KeyStore.getInstance("PKCS12");
-//        keyStore.load(null,null);
-//
-//        KeyStore.PrivateKeyEntry entry =
-//            new KeyStore.PrivateKeyEntry(privKey,
-//                                         new java.security.cert.Certificate[] {(java.security.cert.Certificate)cert});
-//        KeyStore.PasswordProtection prot = new KeyStore.PasswordProtection(keyPassPhrase.toCharArray());
-//        keyStore.setEntry(cert.getSubjectX500Principal().getName(), entry, prot);
-
-//        return keyStore;
-        //TODO: Problem with BouncyCastle version used in gsissh 
-        throw new CertificateException("Method not implemented");
-
-    }
-
-
-    public static KeyStore trustKeyStoreFromCertDir() throws IOException,
-                                                             KeyStoreException,
-                                                             CertificateException,
-                                                             NoSuchAlgorithmException, ApplicationSettingsException {
-        return trustKeyStoreFromCertDir(ServerSettings.getSetting("trusted.cert.location"));
-    }
-
-    public static KeyStore trustKeyStoreFromCertDir(String certDir) throws IOException,
-                                                                           KeyStoreException,
-                                                                           CertificateException,
-                                                                           NoSuchAlgorithmException {
-        KeyStore ks = KeyStore.getInstance("JKS");
-        ks.load(null,null);
-
-        File dir = new File(certDir);
-        for(File file : dir.listFiles()) {
-            if (!file.isFile()) {
-                continue;
-            }
-            if (!file.getName().endsWith(".0")) {
-                continue;
-            }
-
-            try {
-                //System.out.println("reading file "+file.getName());
-                CertificateFactory cf = CertificateFactory.getInstance("X.509");
-                X509Certificate cert = (X509Certificate) cf.generateCertificate(new FileInputStream(file));
-                //System.out.println(cert.toString());
-
-                KeyStore.TrustedCertificateEntry entry = new KeyStore.TrustedCertificateEntry(cert);
-
-                ks.setEntry(cert.getSubjectX500Principal().getName(), entry, null);
-            } catch (KeyStoreException e) {
-            } catch (CertificateParsingException e) {
-                continue;
-            }
-
-        }
-
-        return ks;
-    }
-}
-
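
For reference, the trust-store half of the removed helper needs nothing beyond JDK classes. The sketch below mirrors what trustKeyStoreFromCertDir did, including silently skipping files that fail to parse; the class name TrustStoreSketch and the certDir argument are illustrative, not Airavata code.

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.security.KeyStore;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;

public class TrustStoreSketch {

    /** Builds an in-memory JKS trust store from a directory of hashed CA files (*.0). */
    public static KeyStore fromCertDir(String certDir) throws Exception {
        KeyStore ks = KeyStore.getInstance("JKS");
        ks.load(null, null); // start with an empty store
        CertificateFactory cf = CertificateFactory.getInstance("X.509");

        File[] files = new File(certDir).listFiles();
        if (files == null) {
            return ks; // not a directory, or not readable
        }
        for (File file : files) {
            if (!file.isFile() || !file.getName().endsWith(".0")) {
                continue; // only hashed CA certificate files
            }
            try (InputStream in = new FileInputStream(file)) {
                X509Certificate cert = (X509Certificate) cf.generateCertificate(in);
                ks.setEntry(cert.getSubjectX500Principal().getName(),
                        new KeyStore.TrustedCertificateEntry(cert), null);
            } catch (Exception e) {
                // skip unparsable files, as the original helper did
            }
        }
        return ks;
    }
}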

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
index a5dc68c..b837997 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
@@ -42,7 +42,7 @@ import org.apache.airavata.gfac.core.provider.GFacProviderException;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.GFacUtils;
 import org.apache.airavata.gfac.gsi.ssh.api.CommandExecutor;
-import org.apache.airavata.gfac.gsi.ssh.impl.StandardOutReader;
+import org.apache.airavata.gfac.impl.StandardOutReader;
 import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
 import org.apache.airavata.gfac.monitor.email.EmailMonitorFactory;
 import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
index fdcb0dd..665934e 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
@@ -21,7 +21,7 @@
 package org.apache.airavata.gfac.ssh.util;
 
 import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
+import org.apache.airavata.gfac.impl.HPCRemoteCluster;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.AppCatalogException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java
index 4932fcf..380dc5e 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java
@@ -28,7 +28,7 @@ import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
 import org.apache.airavata.gfac.core.cluster.ServerInfo;
 import org.apache.airavata.gfac.gsi.ssh.api.CommandExecutor;
 import org.apache.airavata.gfac.gsi.ssh.config.ConfigReader;
-import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
+import org.apache.airavata.gfac.impl.HPCRemoteCluster;
 import org.apache.airavata.gfac.gsi.ssh.impl.SystemCommandOutput;
 import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyAuthentication;
 import org.apache.commons.io.IOUtils;

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java
index f262930..b76fda5 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java
@@ -25,7 +25,7 @@ import org.apache.airavata.gfac.core.JobDescriptor;
 import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
 import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
+import org.apache.airavata.gfac.impl.HPCRemoteCluster;
 import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
 import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
 import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
index 6364940..4ed9962 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
@@ -24,7 +24,7 @@ import com.google.common.eventbus.EventBus;
 import com.google.common.eventbus.Subscribe;
 import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
 import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
+import org.apache.airavata.gfac.impl.HPCRemoteCluster;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.common.utils.LocalEventPublisher;
 import org.apache.airavata.gfac.core.JobDescriptor;


[7/7] airavata git commit: Removed gsi related code

Posted by sh...@apache.org.
Removed gsi related code


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/d9b2df03
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/d9b2df03
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/d9b2df03

Branch: refs/heads/master
Commit: d9b2df0336199e7641e1af5da5e0d2b4908bf4ac
Parents: 9226453
Author: Shameera Rathanyaka <sh...@gmail.com>
Authored: Tue Jun 16 15:37:32 2015 -0400
Committer: Shameera Rathanyaka <sh...@gmail.com>
Committed: Tue Jun 16 15:37:32 2015 -0400

----------------------------------------------------------------------
 .../core/monitor/GfacInternalStatusUpdator.java | 103 ---
 .../airavata/gfac/core/monitor/JobMonitor.java  |  39 +
 .../airavata/gfac/gsi/ssh/GSSContextX509.java   | 210 -----
 .../gfac/gsi/ssh/api/CommandExecutor.java       | 295 -------
 .../gsi/ssh/api/job/LSFJobConfiguration.java    | 124 ---
 .../gfac/gsi/ssh/api/job/LSFOutputParser.java   | 132 ----
 .../gsi/ssh/api/job/PBSJobConfiguration.java    | 121 ---
 .../gfac/gsi/ssh/api/job/PBSOutputParser.java   | 218 ------
 .../gsi/ssh/api/job/SlurmJobConfiguration.java  | 119 ---
 .../gfac/gsi/ssh/api/job/SlurmOutputParser.java | 192 -----
 .../gsi/ssh/api/job/UGEJobConfiguration.java    | 121 ---
 .../gfac/gsi/ssh/api/job/UGEOutputParser.java   | 190 -----
 .../gfac/gsi/ssh/config/ConfigReader.java       |  76 --
 .../ssh/impl/DefaultJobSubmissionListener.java  |  43 -
 .../gsi/ssh/impl/GSISSHAbstractCluster.java     | 777 -------------------
 .../gfac/gsi/ssh/impl/HPCRemoteCluster.java     | 333 --------
 .../airavata/gfac/gsi/ssh/impl/SSHUserInfo.java |  63 --
 .../gfac/gsi/ssh/impl/StandardOutReader.java    |  79 --
 .../gfac/gsi/ssh/impl/SystemCommandOutput.java  |  78 --
 .../DefaultPasswordAuthenticationInfo.java      |  48 --
 .../DefaultPublicKeyAuthentication.java         |  68 --
 .../DefaultPublicKeyFileAuthentication.java     |  70 --
 .../MyProxyAuthenticationInfo.java              | 108 ---
 .../gfac/gsi/ssh/jsch/ExtendedJSch.java         |  64 --
 .../gsi/ssh/listener/JobSubmissionListener.java |  81 --
 .../airavata/gfac/gsi/ssh/util/CommonUtils.java |  83 --
 .../ssh/util/SSHAPIUIKeyboardInteractive.java   |  73 --
 .../gsi/ssh/util/SSHKeyPasswordHandler.java     |  68 --
 .../airavata/gfac/gsi/ssh/util/SSHUtils.java    | 760 ------------------
 .../handler/GSISSHDirectorySetupHandler.java    | 118 ---
 .../gfac/gsissh/handler/GSISSHInputHandler.java | 213 -----
 .../gsissh/handler/GSISSHOutputHandler.java     | 323 --------
 .../gsissh/handler/NewGSISSHOutputHandler.java  |  83 --
 .../gsissh/provider/impl/GSISSHProvider.java    | 344 --------
 .../gsissh/security/GSISecurityContext.java     |  67 --
 .../security/TokenizedMyProxyAuthInfo.java      | 304 --------
 .../gfac/gsissh/util/GFACGSISSHUtils.java       | 367 ---------
 .../org/apache/airavata/gfac/impl/Factory.java  |  53 ++
 .../airavata/gfac/impl/HPCRemoteCluster.java    | 332 ++++++++
 .../org/apache/airavata/gfac/impl/SSHUtils.java |   1 -
 .../airavata/gfac/impl/StandardOutReader.java   |  79 ++
 .../gfac/impl/job/LSFJobConfiguration.java      | 123 +++
 .../airavata/gfac/impl/job/LSFOutputParser.java | 133 ++++
 .../gfac/impl/job/PBSJobConfiguration.java      | 121 +++
 .../airavata/gfac/impl/job/PBSOutputParser.java | 219 ++++++
 .../gfac/impl/job/SlurmJobConfiguration.java    | 119 +++
 .../gfac/impl/job/SlurmOutputParser.java        | 193 +++++
 .../gfac/impl/job/UGEJobConfiguration.java      | 121 +++
 .../airavata/gfac/impl/job/UGEOutputParser.java | 191 +++++
 .../airavata/gfac/monitor/HPCMonitorID.java     | 107 ---
 .../airavata/gfac/monitor/HostMonitorData.java  |  88 ---
 .../airavata/gfac/monitor/UserMonitorData.java  |  76 --
 .../command/ExperimentCancelRequest.java        |  38 -
 .../gfac/monitor/command/TaskCancelRequest.java |  52 --
 .../monitor/core/AiravataAbstractMonitor.java   |  37 -
 .../gfac/monitor/core/MessageParser.java        |  43 -
 .../airavata/gfac/monitor/core/Monitor.java     |  30 -
 .../airavata/gfac/monitor/core/PullMonitor.java |  64 --
 .../airavata/gfac/monitor/core/PushMonitor.java |  60 --
 .../gfac/monitor/email/EmailBasedMonitor.java   |  18 +-
 .../gfac/monitor/email/EmailMonitorFactory.java |  47 --
 .../handlers/GridPullMonitorHandler.java        | 139 ----
 .../handlers/GridPushMonitorHandler.java        | 107 ---
 .../monitor/impl/pull/qstat/HPCPullMonitor.java | 471 -----------
 .../impl/pull/qstat/ResourceConnection.java     | 154 ----
 .../monitor/impl/push/amqp/AMQPMonitor.java     | 280 -------
 .../monitor/impl/push/amqp/BasicConsumer.java   |  87 ---
 .../impl/push/amqp/ComputingActivity.java       |  19 -
 .../impl/push/amqp/JSONMessageParser.java       |  77 --
 .../impl/push/amqp/SimpleJobFinishConsumer.java |  86 --
 .../impl/push/amqp/UnRegisterWorker.java        |  67 --
 .../gfac/monitor/util/AMQPConnectionUtil.java   |  80 --
 .../airavata/gfac/monitor/util/CommonUtils.java | 280 -------
 .../airavata/gfac/monitor/util/X509Helper.java  | 164 ----
 .../gfac/ssh/provider/impl/SSHProvider.java     |   2 +-
 .../airavata/gfac/ssh/util/GFACSSHUtils.java    |   2 +-
 .../impl/DefaultSSHApiTestWithMyProxyAuth.java  |   2 +-
 .../gfac/ssh/impl/VanilaTestWithSSHAuth.java    |   2 +-
 .../apache/airavata/job/AMQPMonitorTest.java    |   2 +-
 79 files changed, 1744 insertions(+), 9177 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java
deleted file mode 100644
index 84d72fa..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/GfacInternalStatusUpdator.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.core.monitor;
-
-import com.google.common.eventbus.Subscribe;
-import org.apache.airavata.common.utils.AiravataZKUtils;
-import org.apache.airavata.common.utils.Constants;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.common.utils.listener.AbstractActivityListener;
-import org.apache.airavata.gfac.core.monitor.state.GfacExperimentStateChangeRequest;
-import org.apache.airavata.gfac.core.states.GfacExperimentState;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.WatchedEvent;
-import org.apache.zookeeper.Watcher;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.data.Stat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-
-public class GfacInternalStatusUpdator implements AbstractActivityListener, Watcher {
-    private final static Logger logger = LoggerFactory.getLogger(GfacInternalStatusUpdator.class);
-
-    private CuratorFramework curatorClient;
-
-    private static Integer mutex = -1;
-
-    @Subscribe
-    public void updateZK(GfacExperimentStateChangeRequest statusChangeRequest) throws Exception {
-        logger.info("Gfac internal state changed to: " + statusChangeRequest.getState().toString());
-        MonitorID monitorID = statusChangeRequest.getMonitorID();
-        String experimentNode = ServerSettings.getSetting(Constants.ZOOKEEPER_GFAC_EXPERIMENT_NODE, "/gfac-experiments");
-        String experimentPath = experimentNode + File.separator + ServerSettings.getSetting(Constants.ZOOKEEPER_GFAC_SERVER_NAME)
-                + File.separator + statusChangeRequest.getMonitorID().getExperimentID();
-        Stat exists = null;
-        if(!(GfacExperimentState.COMPLETED.equals(statusChangeRequest.getState()) || GfacExperimentState.FAILED.equals(statusChangeRequest.getState()))) {
-            exists = curatorClient.checkExists().forPath(experimentPath);
-            if (exists == null) {
-                logger.error("ZK path: " + experimentPath + " does not exists !!");
-                return;
-            }
-            Stat state = curatorClient.checkExists().forPath(experimentPath + File.separator + AiravataZKUtils.ZK_EXPERIMENT_STATE_NODE);
-            if (state == null) {
-                // state znode has to be created
-                curatorClient.create().withMode(CreateMode.PERSISTENT).withACL(ZooDefs.Ids.OPEN_ACL_UNSAFE).
-                        forPath(experimentPath + File.separator + AiravataZKUtils.ZK_EXPERIMENT_STATE_NODE,
-                                String.valueOf(statusChangeRequest.getState().getValue()).getBytes());
-            } else {
-                curatorClient.setData().withVersion(state.getVersion()).forPath(experimentPath + File.separator + AiravataZKUtils.ZK_EXPERIMENT_STATE_NODE,
-                        String.valueOf(statusChangeRequest.getState().getValue()).getBytes());
-            }
-        }
-        switch (statusChangeRequest.getState()) {
-            case COMPLETED:
-                logger.info("Experiment Completed, So removing the ZK entry for the experiment" + monitorID.getExperimentID());
-                logger.info("Zookeeper experiment Path: " + experimentPath);
-                break;
-            case FAILED:
-                logger.info("Experiment Failed, So removing the ZK entry for the experiment" + monitorID.getExperimentID());
-                logger.info("Zookeeper experiment Path: " + experimentPath);
-                break;
-            default:
-        }
-    }
-
-    public void setup(Object... configurations) {
-        for (Object configuration : configurations) {
-            if (configuration instanceof CuratorFramework) {
-                this.curatorClient = (CuratorFramework) configuration;
-            }
-        }
-    }
-
-    public void process(WatchedEvent watchedEvent) {
-        logger.info(watchedEvent.getPath());
-        synchronized (mutex) {
-            Event.KeeperState state = watchedEvent.getState();
-            if (state == Event.KeeperState.SyncConnected) {
-                mutex.notify();
-            }
-        }
-    }
-}
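
For reference, the create-or-update znode pattern the removed updator relied on, as a minimal stand-alone sketch using the same Curator calls as the deleted code; the class name, path and payload are illustrative placeholders.

import org.apache.curator.framework.CuratorFramework;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.data.Stat;

public class ZkStateWriterSketch {

    /** Creates the state znode if it is missing, otherwise updates it with a version check. */
    public static void writeState(CuratorFramework client, String statePath, byte[] state) throws Exception {
        Stat existing = client.checkExists().forPath(statePath);
        if (existing == null) {
            // the state znode has to be created first
            client.create().withMode(CreateMode.PERSISTENT)
                    .withACL(ZooDefs.Ids.OPEN_ACL_UNSAFE)
                    .forPath(statePath, state);
        } else {
            // update in place; the version check guards against concurrent writers
            client.setData().withVersion(existing.getVersion()).forPath(statePath, state);
        }
    }
}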

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/JobMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/JobMonitor.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/JobMonitor.java
new file mode 100644
index 0000000..64a9838
--- /dev/null
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/JobMonitor.java
@@ -0,0 +1,39 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+package org.apache.airavata.gfac.core.monitor;
+
+import org.apache.airavata.gfac.core.context.ProcessContext;
+
+public interface JobMonitor {
+
+	/**
+	 * Start monitoring the given jobId on the remote compute resource.
+	 * @param jobId
+	 * @param processContext
+	 */
+	public void monitor(String jobId, ProcessContext processContext);
+
+	/**
+	 * Stop monitoring the given jobId.
+	 * @param jobId
+	 */
+	public void stopMonitor(String jobId);
+}
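
The new interface only fixes the contract; a hypothetical, do-nothing implementation is sketched below to show how it is meant to be wired up. The class name and the internal map are illustrative and not part of this commit.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.airavata.gfac.core.context.ProcessContext;
import org.apache.airavata.gfac.core.monitor.JobMonitor;

public class NoOpJobMonitor implements JobMonitor {

    private final Map<String, ProcessContext> monitoredJobs = new ConcurrentHashMap<String, ProcessContext>();

    @Override
    public void monitor(String jobId, ProcessContext processContext) {
        // remember the job; a real monitor would start polling or subscribe to job notifications here
        monitoredJobs.put(jobId, processContext);
    }

    @Override
    public void stopMonitor(String jobId) {
        // forget the job; a real monitor would cancel its polling task or subscription here
        monitoredJobs.remove(jobId);
    }
}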

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/GSSContextX509.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/GSSContextX509.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/GSSContextX509.java
deleted file mode 100644
index a6eea6f..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/GSSContextX509.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh;
-
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-
-import org.globus.common.CoGProperties;
-import org.globus.gsi.gssapi.auth.HostAuthorization;
-import org.gridforum.jgss.ExtendedGSSCredential;
-import org.gridforum.jgss.ExtendedGSSManager;
-import org.ietf.jgss.GSSContext;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
-import org.ietf.jgss.GSSName;
-import org.ietf.jgss.MessageProp;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.jcraft.jsch.JSchException;
-
-/**
- * This class is based on GSSContextKrb5; it substitutes the globus
- * ExtendedGSSManager and uses the SecurityUtils method to get the credential if
- * one is not passed in from memory.
- *
- */
-public class GSSContextX509 implements com.jcraft.jsch.GSSContext {
-
-    private GSSContext context = null;
-    private GSSCredential credential;
-    private static final Logger logger = LoggerFactory.getLogger(GSSContextX509.class);
-
-    public void create(String user, String host) throws JSchException {
-        try {
-//			ExtendedGSSManager manager = (ExtendedGSSManager) ExtendedGSSManager.getInstance();
-
-            if (credential == null) {
-                try {
-                    credential = getCredential();
-                } catch (SecurityException t) {
-                    System.out.printf("Could not get proxy: %s: %s\n", t.getClass().getSimpleName(), t.getMessage());
-                    throw new JSchException(t.toString());
-                }
-            }
-
-            String cname = host;
-
-            try {
-                cname = InetAddress.getByName(cname).getCanonicalHostName();
-            } catch (UnknownHostException e) {
-            }
-
-            GSSName name = HostAuthorization.getInstance().getExpectedName(credential, cname);
-
-//			context = manager.createContext(name, null, credential, GSSContext.DEFAULT_LIFETIME);
-//
-//			// RFC4462 3.4. GSS-API Session
-//			//
-//			// When calling GSS_Init_sec_context(), the client MUST set
-//			// integ_req_flag to "true" to request that per-message integrity
-//			// protection be supported for this context. In addition,
-//			// deleg_req_flag MAY be set to "true" to request access delegation,
-//			// if
-//			// requested by the user.
-//			//
-//			// Since the user authentication process by its nature authenticates
-//			// only the client, the setting of mutual_req_flag is not needed for
-//			// this process. This flag SHOULD be set to "false".
-//
-//			// TODO: OpenSSH's sshd does accept 'false' for mutual_req_flag
-//			// context.requestMutualAuth(false);
-//			context.requestMutualAuth(true);
-//			context.requestConf(true);
-//			context.requestInteg(true); // for MIC
-//			context.requestCredDeleg(true);
-//			context.requestAnonymity(false);
-
-//            context = new BCGSSContextImpl(name, (GlobusGSSCredentialImpl) credential);
-//            context.requestLifetime(GSSCredential.DEFAULT_LIFETIME);
-//            context.requestCredDeleg(true);
-//            context.requestMutualAuth(true);
-//            context.requestReplayDet(true);
-//            context.requestSequenceDet(true);
-//            context.requestConf(false);
-//            context.requestInteg(true);
-//            ((ExtendedGSSContext)context).setOption(GSSConstants.DELEGATION_TYPE, GSIConstants.DELEGATION_TYPE_FULL);
-
-            return;
-        } catch (GSSException ex) {
-            throw new JSchException(ex.toString());
-        }
-    }
-
-    private static GSSCredential getProxy() {
-        return getProxy(null, GSSCredential.DEFAULT_LIFETIME);
-    }
-
-    /**
-     * @param x509_USER_PROXY
-     *            path to the proxy.
-     * @param credentialLifetime
-     *            in seconds.
-     * @return a valid credential.
-     * @throws SecurityException if the proxy cannot be loaded or cannot be found.
-     */
-    private static GSSCredential getProxy(String x509_USER_PROXY, int credentialLifetime) throws SecurityException {
-        if (x509_USER_PROXY == null)
-            x509_USER_PROXY = System.getProperty("x509.user.proxy");
-
-//		if (x509_USER_PROXY == null) {
-//			SystemUtils.envToProperties();
-//			x509_USER_PROXY = System.getProperty("x509.user.proxy");
-//		}
-
-        if (x509_USER_PROXY == null || "".equals(x509_USER_PROXY))
-            x509_USER_PROXY = CoGProperties.getDefault().getProxyFile();
-
-        if (x509_USER_PROXY == null)
-            throw new SecurityException("could not get credential; no location defined");
-
-        ExtendedGSSManager manager = (ExtendedGSSManager) ExtendedGSSManager.getInstance();
-
-        // file...load file into a buffer
-        try {
-            File f = new File(x509_USER_PROXY);
-            byte[] data = new byte[(int) f.length()];
-            FileInputStream in = new FileInputStream(f);
-            // read in the credential data
-            in.read(data);
-            in.close();
-            return manager.createCredential(data, ExtendedGSSCredential.IMPEXP_OPAQUE, credentialLifetime, null, // use
-                    // default
-                    // mechanism
-                    // -
-                    // GSI
-                    GSSCredential.INITIATE_AND_ACCEPT);
-        } catch (Throwable t) {
-            throw new SecurityException("could not get credential from " + x509_USER_PROXY, t);
-        }
-    }
-
-    public boolean isEstablished() {
-        // this must check to see if the call returned GSS_S_COMPLETE
-        if (context != null){
-            return context.isEstablished();
-        }
-        return false;
-    }
-
-    public byte[] init(byte[] token, int s, int l) throws JSchException {
-        try {
-            if (context != null){
-                return context.initSecContext(token, s, l);
-            }else {
-                throw new JSchException("Context is null..");
-            }
-        } catch (GSSException ex) {
-            throw new JSchException(ex.toString());
-        }
-    }
-
-    public byte[] getMIC(byte[] message, int s, int l) {
-        try {
-            MessageProp prop = new MessageProp(0, false);
-            return context.getMIC(message, s, l, prop);
-        } catch (GSSException ex) {
-            logger.error(ex.getMessage(), ex);
-            return null;
-        }
-    }
-
-    public void dispose() {
-        try {
-            context.dispose();
-        } catch (GSSException ex) {
-        }
-    }
-
-    public void setCredential(GSSCredential credential) {
-        this.credential = credential;
-    }
-
-    public GSSCredential getCredential() {
-        return credential;
-    }
-}
-
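
For reference, the proxy-loading logic the removed class depended on, as a stand-alone sketch using the same JGlobus calls as the deleted getProxy method; the class name and proxy path are placeholders.

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import org.gridforum.jgss.ExtendedGSSCredential;
import org.gridforum.jgss.ExtendedGSSManager;
import org.ietf.jgss.GSSCredential;

public class ProxyCredentialSketch {

    /** Loads an X.509 proxy file into a GSSCredential. */
    public static GSSCredential load(String proxyPath, int lifetimeSeconds) throws Exception {
        File f = new File(proxyPath);
        byte[] data = new byte[(int) f.length()];
        try (InputStream in = new FileInputStream(f)) {
            int offset = 0;
            while (offset < data.length) { // read the whole proxy file
                int read = in.read(data, offset, data.length - offset);
                if (read < 0) {
                    break;
                }
                offset += read;
            }
        }
        ExtendedGSSManager manager = (ExtendedGSSManager) ExtendedGSSManager.getInstance();
        // IMPEXP_OPAQUE with INITIATE_AND_ACCEPT, exactly as in the removed code
        return manager.createCredential(data, ExtendedGSSCredential.IMPEXP_OPAQUE,
                lifetimeSeconds, null, GSSCredential.INITIATE_AND_ACCEPT);
    }
}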

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/CommandExecutor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/CommandExecutor.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/CommandExecutor.java
deleted file mode 100644
index bf306ef..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/CommandExecutor.java
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api;
-
-import com.jcraft.jsch.Channel;
-import com.jcraft.jsch.ChannelExec;
-import com.jcraft.jsch.ExtendedSession;
-import com.jcraft.jsch.GSISSHIdentityFile;
-import com.jcraft.jsch.GSISSHIdentityRepository;
-import com.jcraft.jsch.Identity;
-import com.jcraft.jsch.JSch;
-import com.jcraft.jsch.JSchException;
-import com.jcraft.jsch.Session;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gfac.core.authentication.SSHKeyAuthentication;
-import org.apache.airavata.gfac.core.authentication.SSHPublicKeyAuthentication;
-import org.apache.airavata.gfac.core.authentication.SSHPublicKeyFileAuthentication;
-import org.apache.airavata.gfac.core.cluster.CommandInfo;
-import org.apache.airavata.gfac.core.cluster.CommandOutput;
-import org.apache.airavata.gfac.core.authentication.SSHPasswordAuthentication;
-import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.gsi.ssh.config.ConfigReader;
-import org.apache.airavata.gfac.gsi.ssh.jsch.ExtendedJSch;
-import org.apache.airavata.gfac.gsi.ssh.util.SSHAPIUIKeyboardInteractive;
-import org.apache.airavata.gfac.gsi.ssh.util.SSHKeyPasswordHandler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * This is a generic class which take care of command execution
- * in a shell, this is used through out the other places of the API.
- */
-public class CommandExecutor {
-    static {
-        JSch.setConfig("gssapi-with-mic.x509", "org.apache.airavata.gfac.ssh.GSSContextX509");
-        JSch.setConfig("userauth.gssapi-with-mic", "com.jcraft.jsch.UserAuthGSSAPIWithMICGSSCredentials");
-        JSch jSch = new JSch();
-    }
-
-    private static final Logger log = LoggerFactory.getLogger(CommandExecutor.class);
-    public static final String X509_CERT_DIR = "X509_CERT_DIR";
-
-    /**
-     * This will execute the given command with given session and session is not closed at the end.
-     *
-     * @param commandInfo
-     * @param session
-     * @param commandOutput
-     * @throws SSHApiException
-     */
-    public static Session executeCommand(CommandInfo commandInfo, Session session,
-                                         CommandOutput commandOutput) throws SSHApiException {
-
-        String command = commandInfo.getCommand();
-
-        Channel channel = null;
-        try {
-            if (!session.isConnected()) {
-                session.connect();
-            }
-            channel = session.openChannel("exec");
-            ((ChannelExec) channel).setCommand(command);
-        } catch (JSchException e) {
-//            session.disconnect();
-
-            throw new SSHApiException("Unable to execute command - ", e);
-        }
-
-        channel.setInputStream(null);
-        ((ChannelExec) channel).setErrStream(commandOutput.getStandardError());
-        try {
-            channel.connect();
-        } catch (JSchException e) {
-
-            channel.disconnect();
-//            session.disconnect();
-            throw new SSHApiException("Unable to retrieve command output. Command - " + command, e);
-        }
-
-
-        commandOutput.onOutput(channel);
-        //Only disconnecting the channel, session can be reused
-        channel.disconnect();
-        return session;
-    }
-
-    /**
-     * This will not reuse any session, it will create the session and close it at the end
-     *
-     * @param commandInfo        Encapsulated information about command. E.g :- executable name
-     *                           parameters etc ...
-     * @param serverInfo         The SSHing server information.
-     * @param authenticationInfo Security data needs to be communicated with remote server.
-     * @param commandOutput      The output of the command.
-     * @param configReader       configuration required for ssh/gshissh connection
-     * @throws SSHApiException   throw exception when error occurs
-     */
-    public static void executeCommand(CommandInfo commandInfo, ServerInfo serverInfo,
-                                      AuthenticationInfo authenticationInfo,
-                                      CommandOutput commandOutput, ConfigReader configReader) throws SSHApiException {
-
-        if (authenticationInfo instanceof GSIAuthenticationInfo) {
-            System.setProperty(X509_CERT_DIR, (String) ((GSIAuthenticationInfo)authenticationInfo).getProperties().
-                    get("X509_CERT_DIR"));
-        }
-
-
-        JSch jsch = new ExtendedJSch();
-
-        log.debug("Connecting to server - " + serverInfo.getHost() + ":" + serverInfo.getPort() + " with user name - "
-                + serverInfo.getUserName());
-
-        Session session;
-
-        try {
-            session = jsch.getSession(serverInfo.getUserName(), serverInfo.getHost(), serverInfo.getPort());
-        } catch (JSchException e) {
-            throw new SSHApiException("An exception occurred while creating SSH session." +
-                    "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-        java.util.Properties config = configReader.getProperties();
-        session.setConfig(config);
-
-        //=============================================================
-        // Handling vanilla SSH pieces
-        //=============================================================
-        if (authenticationInfo instanceof SSHPasswordAuthentication) {
-            String password = ((SSHPasswordAuthentication) authenticationInfo).
-                    getPassword(serverInfo.getUserName(), serverInfo.getHost());
-
-            session.setUserInfo(new SSHAPIUIKeyboardInteractive(password));
-
-            // TODO figure out why we need to set password to session
-            session.setPassword(password);
-
-        } else if (authenticationInfo instanceof SSHPublicKeyFileAuthentication) {
-            SSHPublicKeyFileAuthentication sshPublicKeyFileAuthentication
-                    = (SSHPublicKeyFileAuthentication)authenticationInfo;
-
-            String privateKeyFile = sshPublicKeyFileAuthentication.
-                    getPrivateKeyFile(serverInfo.getUserName(), serverInfo.getHost());
-
-            logDebug("The private key file for vanilla SSH " + privateKeyFile);
-
-            String publicKeyFile = sshPublicKeyFileAuthentication.
-                    getPrivateKeyFile(serverInfo.getUserName(), serverInfo.getHost());
-
-            logDebug("The public key file for vanilla SSH " + publicKeyFile);
-
-            Identity identityFile;
-
-            try {
-                identityFile = GSISSHIdentityFile.newInstance(privateKeyFile, null, jsch);
-            } catch (JSchException e) {
-                throw new SSHApiException("An exception occurred while initializing keys using files. " +
-                        "(private key and public key)." +
-                        "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                        " connecting user name - "
-                        + serverInfo.getUserName() + " private key file - " + privateKeyFile + ", public key file - " +
-                        publicKeyFile, e);
-            }
-
-            // Add identity to identity repository
-            GSISSHIdentityRepository identityRepository = new GSISSHIdentityRepository(jsch);
-            identityRepository.add(identityFile);
-
-            // Set repository to session
-            session.setIdentityRepository(identityRepository);
-
-            // Set the user info
-            SSHKeyPasswordHandler sshKeyPasswordHandler
-                    = new SSHKeyPasswordHandler((SSHKeyAuthentication)authenticationInfo);
-
-            session.setUserInfo(sshKeyPasswordHandler);
-
-        } else if (authenticationInfo instanceof SSHPublicKeyAuthentication) {
-
-            SSHPublicKeyAuthentication sshPublicKeyAuthentication
-                    = (SSHPublicKeyAuthentication)authenticationInfo;
-
-            Identity identityFile;
-
-            try {
-                String name = serverInfo.getUserName() + "_" + serverInfo.getHost();
-                identityFile = GSISSHIdentityFile.newInstance(name,
-                        sshPublicKeyAuthentication.getPrivateKey(serverInfo.getUserName(), serverInfo.getHost()),
-                        sshPublicKeyAuthentication.getPublicKey(serverInfo.getUserName(), serverInfo.getHost()), jsch);
-            } catch (JSchException e) {
-                throw new SSHApiException("An exception occurred while initializing keys using byte arrays. " +
-                        "(private key and public key)." +
-                        "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                        " connecting user name - "
-                        + serverInfo.getUserName(), e);
-            }
-
-            // Add identity to identity repository
-            GSISSHIdentityRepository identityRepository = new GSISSHIdentityRepository(jsch);
-            identityRepository.add(identityFile);
-
-            // Set repository to session
-            session.setIdentityRepository(identityRepository);
-
-            // Set the user info
-            SSHKeyPasswordHandler sshKeyPasswordHandler
-                    = new SSHKeyPasswordHandler((SSHKeyAuthentication)authenticationInfo);
-
-            session.setUserInfo(sshKeyPasswordHandler);
-
-        }
-
-        // Not a good way, but we don't have any choice
-        if (session instanceof ExtendedSession) {
-            if (authenticationInfo instanceof GSIAuthenticationInfo) {
-                ((ExtendedSession) session).setAuthenticationInfo((GSIAuthenticationInfo)authenticationInfo);
-            }
-        }
-
-        try {
-            session.connect();
-        } catch (JSchException e) {
-            throw new SSHApiException("An exception occurred while connecting to server." +
-                    "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-        String command = commandInfo.getCommand();
-
-        Channel channel;
-        try {
-            channel = session.openChannel("exec");
-            ((ChannelExec) channel).setCommand(command);
-        } catch (JSchException e) {
-//            session.disconnect();
-
-            throw new SSHApiException("Unable to execute command - " + command +
-                    " on server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-
-        channel.setInputStream(null);
-        ((ChannelExec) channel).setErrStream(commandOutput.getStandardError());
-
-        try {
-            channel.connect();
-        } catch (JSchException e) {
-
-            channel.disconnect();
-//            session.disconnect();
-
-            throw new SSHApiException("Unable to retrieve command output. Command - " + command +
-                    " on server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-        commandOutput.onOutput(channel);
-
-        channel.disconnect();
-//        session.disconnect();
-    }
-
-    private static void logDebug(String message) {
-        if (log.isDebugEnabled()) {
-            log.debug(message);
-        }
-    }
-
-
-}
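
The removed executor was essentially a wrapper around JSch's exec channel. A minimal stand-alone sketch of that core pattern follows; the user, host and password parameters are placeholders, and a real caller would plug in Airavata's authentication objects and proper host-key checking instead.

import java.io.ByteArrayOutputStream;
import java.io.InputStream;

import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;

public class ExecSketch {

    /** Runs a single command over SSH and returns its standard output. */
    public static String run(String user, String host, int port, String password, String command)
            throws Exception {
        JSch jsch = new JSch();
        Session session = jsch.getSession(user, host, port);
        session.setPassword(password);
        session.setConfig("StrictHostKeyChecking", "no"); // demo only; verify host keys in production
        session.connect();
        ChannelExec channel = (ChannelExec) session.openChannel("exec");
        try {
            channel.setCommand(command);
            channel.setInputStream(null);
            channel.setErrStream(System.err);
            InputStream stdout = channel.getInputStream(); // obtain before connect()
            channel.connect();
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] buffer = new byte[4096];
            int read;
            while ((read = stdout.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
            return out.toString();
        } finally {
            channel.disconnect(); // the session could be reused, as in the removed code
            session.disconnect();
        }
    }
}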

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/LSFJobConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/LSFJobConfiguration.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/LSFJobConfiguration.java
deleted file mode 100644
index 9e2a913..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/LSFJobConfiguration.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api.job;
-
-import org.apache.airavata.gfac.core.AbstractJobManagerConfiguration;
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
-import org.apache.commons.io.FilenameUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-
-public class LSFJobConfiguration implements JobManagerConfiguration {
-    private final static Logger logger = LoggerFactory.getLogger(LSFJobConfiguration.class);
-
-    private String jobDescriptionTemplateName;
-
-    private String scriptExtension;
-
-    private String installedPath;
-
-    private OutputParser parser;
-
-    public LSFJobConfiguration(){
-        // this can be used to construct and use setter methods to set all the params in order
-    }
-    public LSFJobConfiguration(String jobDescriptionTemplateName,
-                                 String scriptExtension,String installedPath,OutputParser parser) {
-        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
-        this.scriptExtension = scriptExtension;
-        this.parser = parser;
-        if (installedPath.endsWith("/") || installedPath.isEmpty()) {
-            this.installedPath = installedPath;
-        } else {
-            this.installedPath = installedPath + "/";
-        }
-    }
-
-    @Override
-    public RawCommandInfo getCancelCommand(String jobID) {
-        return new RawCommandInfo(this.installedPath + "bkill " + jobID);
-    }
-
-    @Override
-    public String getJobDescriptionTemplateName() {
-        return jobDescriptionTemplateName;
-    }
-
-    @Override
-    public RawCommandInfo getMonitorCommand(String jobID) {
-        return new RawCommandInfo(this.installedPath + "bjobs " + jobID);
-    }
-
-    @Override
-    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
-        return new RawCommandInfo(this.installedPath + "bjobs -u " + userName);
-    }
-
-    @Override
-    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
-        return new RawCommandInfo(this.installedPath + "bjobs -J " + jobName);
-    }
-
-    @Override
-    public String getScriptExtension() {
-        return scriptExtension;
-    }
-
-    @Override
-    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
-        return new RawCommandInfo(this.installedPath + "bsub < " +
-                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
-    }
-
-    @Override
-    public OutputParser getParser() {
-        return parser;
-    }
-
-    public void setParser(OutputParser parser) {
-        this.parser = parser;
-    }
-
-    @Override
-    public String getInstalledPath() {
-        return installedPath;
-    }
-
-
-    @Override
-    public String getBaseCancelCommand() {
-        return "bkill";
-    }
-
-    @Override
-    public String getBaseMonitorCommand() {
-        return "bjobs";
-    }
-
-    @Override
-    public String getBaseSubmitCommand() {
-        return "bsub";
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/LSFOutputParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/LSFOutputParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/LSFOutputParser.java
deleted file mode 100644
index 76012d6..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/LSFOutputParser.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api.job;
-
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class LSFOutputParser implements OutputParser {
-    private final static Logger logger = LoggerFactory.getLogger(LSFOutputParser.class);
-
-    @Override
-    public void parseSingleJob(JobDescriptor jobDescriptor, String rawOutput) throws SSHApiException {
-        logger.debug(rawOutput);
-        //TODO: we need to implement this, but it is not used in the Airavata runtime;
-        // if someone uses gsissh as a standalone tool, this is useful for getting descriptive information about a single job
-    }
-
-    @Override
-    public String parseJobSubmission(String rawOutput) throws SSHApiException {
-        logger.debug(rawOutput);
-        return rawOutput.substring(rawOutput.indexOf("<")+1,rawOutput.indexOf(">"));
-    }
-
-    @Override
-    public JobStatus parseJobStatus(String jobID, String rawOutput) throws SSHApiException {
-        boolean jobFount = false;
-        logger.debug(rawOutput);
-        //todo this is not used anymore
-        return JobStatus.C;
-    }
-
-    @Override
-    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws SSHApiException {
-        logger.debug(rawOutput);
-
-        String[]    info = rawOutput.split("\n");
-//        int lastStop = 0;
-        for (String jobID : statusMap.keySet()) {
-            String jobName = jobID.split(",")[1];
-            boolean found = false;
-            for (int i = 0; i < info.length; i++) {
-                if (info[i].contains(jobName.substring(0,8))) {
-                    // now starts processing this line
-                    logger.info(info[i]);
-                    String correctLine = info[i];
-                    String[] columns = correctLine.split(" ");
-                    List<String> columnList = new ArrayList<String>();
-                    for (String s : columns) {
-                        if (!"".equals(s)) {
-                            columnList.add(s);
-                        }
-                    }
-//                    lastStop = i + 1;
-                    try {
-                        statusMap.put(jobID, JobStatus.valueOf(columnList.get(2)));
-                    }catch(IndexOutOfBoundsException e){
-                        statusMap.put(jobID, JobStatus.valueOf("U"));
-                    }
-                    found = true;
-                    break;
-                }
-            }
-            if (!found)
-                logger.error("Couldn't find the status of the job with JobName: " + jobName + ", Job Id: " + jobID.split(",")[0]);
-        }
-    }
-
-    @Override
-    public String parseJobId(String jobName, String rawOutput) throws SSHApiException {
-        String regJobId = "jobId";
-        Pattern pattern = Pattern.compile("(?=(?<" + regJobId + ">\\d+)\\s+\\w+\\s+" + jobName + ")"); // regex - look ahead and match
-        if (rawOutput != null) {
-            Matcher matcher = pattern.matcher(rawOutput);
-            if (matcher.find()) {
-                return matcher.group(regJobId);
-            } else {
-                logger.error("No match is found for JobName");
-                return null;
-            }
-        } else {
-            logger.error("Error: RawOutput shouldn't be null");
-            return null;
-        }
-    }
-
-    public static void main(String[] args) {
-        String test = "Job <2477982> is submitted to queue <short>.";
-        System.out.println(test.substring(test.indexOf("<")+1, test.indexOf(">")));
-        String test1 = "JOBID   USER    STAT  QUEUE      FROM_HOST   EXEC_HOST   JOB_NAME   SUBMIT_TIME\n" +
-                "2636607 lg11w   RUN   long       ghpcc06     c11b02      *069656647 Mar  7 00:58\n" +
-                "2636582 lg11w   RUN   long       ghpcc06     c02b01      2134490944 Mar  7 00:48";
-        Map<String, JobStatus> statusMap = new HashMap<String, JobStatus>();
-        statusMap.put("2477983,2134490944", JobStatus.U);
-        LSFOutputParser lsfOutputParser = new LSFOutputParser();
-        try {
-            lsfOutputParser.parseJobStatuses("cjh", statusMap, test1);
-        } catch (SSHApiException e) {
-            logger.error(e.getMessage(), e);
-        }
-        System.out.println(statusMap.get("2477983,2134490944"));
-
-    }
-}
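
For reference, the look-ahead pattern used in parseJobId above can be exercised in isolation. The sketch below is not part of the original change; the class name and the sample output line (job ID, user and job name as the first three whitespace-separated columns) are invented purely for illustration.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LsfJobIdRegexSketch {
    public static void main(String[] args) {
        String jobName = "A312402627";                                  // assumed job name
        String rawOutput = "2477982 lg11w   " + jobName + "   short";   // fabricated output line
        // Zero-width look-ahead with a named group, as in LSFOutputParser.parseJobId
        Pattern pattern = Pattern.compile("(?=(?<jobId>\\d+)\\s+\\w+\\s+" + jobName + ")");
        Matcher matcher = pattern.matcher(rawOutput);
        if (matcher.find()) {
            System.out.println("job id = " + matcher.group("jobId"));   // prints: job id = 2477982
        }
    }
}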

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/PBSJobConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/PBSJobConfiguration.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/PBSJobConfiguration.java
deleted file mode 100644
index c5be412..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/PBSJobConfiguration.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api.job;
-
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
-import org.apache.commons.io.FilenameUtils;
-
-import java.io.File;
-
-public class PBSJobConfiguration implements JobManagerConfiguration {
-
-    private String jobDescriptionTemplateName;
-
-    private String scriptExtension;
-
-    private String installedPath;
-
-    private OutputParser parser;
-
-    public PBSJobConfiguration() {
-        // this can be used to construct and use setter methods to set all the params in order
-    }
-
-    public PBSJobConfiguration(String jobDescriptionTemplateName,
-                               String scriptExtension, String installedPath, OutputParser parser) {
-        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
-        this.scriptExtension = scriptExtension;
-        this.parser = parser;
-        if (installedPath.endsWith("/")) {
-            this.installedPath = installedPath;
-        } else {
-            this.installedPath = installedPath + "/";
-        }
-    }
-
-    public RawCommandInfo getCancelCommand(String jobID) {
-        return new RawCommandInfo(this.installedPath + "qdel " + jobID);
-    }
-
-    public String getJobDescriptionTemplateName() {
-        return jobDescriptionTemplateName;
-    }
-
-    public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
-        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
-    }
-
-    public RawCommandInfo getMonitorCommand(String jobID) {
-        return new RawCommandInfo(this.installedPath + "qstat -f " + jobID);
-    }
-
-    public String getScriptExtension() {
-        return scriptExtension;
-    }
-
-    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
-        return new RawCommandInfo(this.installedPath + "qsub " +
-                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
-    }
-
-    public String getInstalledPath() {
-        return installedPath;
-    }
-
-    public void setInstalledPath(String installedPath) {
-        this.installedPath = installedPath;
-    }
-
-    public OutputParser getParser() {
-        return parser;
-    }
-
-    public void setParser(OutputParser parser) {
-        this.parser = parser;
-    }
-
-    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
-        return new RawCommandInfo(this.installedPath + "qstat -u " + userName);
-    }
-
-    @Override
-    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
-        // For PBS there is no option to get jobDetails by JobName, so we search with userName
-        return new RawCommandInfo(this.installedPath + "qstat -u " + userName + " -f  | grep \"Job_Name = " + jobName + "\" -B1");
-    }
-
-    @Override
-    public String  getBaseCancelCommand() {
-        return "qdel";
-    }
-
-    @Override
-    public String  getBaseMonitorCommand() {
-        return "qstat";
-    }
-
-    @Override
-    public String getBaseSubmitCommand() {
-        return "qsub ";
-    }
-}
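
As an aside, the way getSubmitCommand above assembles the qsub invocation can be reproduced with plain JDK calls (java.io.File stands in for commons-io's FilenameUtils). All paths below are invented for illustration.

import java.io.File;

public class PbsSubmitCommandSketch {
    public static void main(String[] args) {
        String installedPath = "/opt/torque/bin/";           // assumed qsub install location
        String workingDirectory = "/scratch/user/job123";    // assumed job working directory
        String pbsFilePath = "/tmp/generated/job123.pbs";    // assumed generated PBS script
        // Same shape as PBSJobConfiguration.getSubmitCommand: <installedPath>qsub <workingDir>/<scriptName>
        String command = installedPath + "qsub "
                + workingDirectory + File.separator + new File(pbsFilePath).getName();
        System.out.println(command);  // e.g. /opt/torque/bin/qsub /scratch/user/job123/job123.pbs on a Unix-like FS
    }
}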

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/PBSOutputParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/PBSOutputParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/PBSOutputParser.java
deleted file mode 100644
index 15e2405..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/PBSOutputParser.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api.job;
-
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class PBSOutputParser implements OutputParser {
-    private static final Logger log = LoggerFactory.getLogger(PBSOutputParser.class);
-
-    public void parseSingleJob(JobDescriptor jobDescriptor, String rawOutput) {
-        log.debug(rawOutput);
-        String[] info = rawOutput.split("\n");
-        String[] line;
-        for (int i = 0; i < info.length; i++) {
-            if (info[i].contains("=")) {
-                line = info[i].split("=", 2);
-            } else {
-                line = info[i].split(":", 2);
-            }
-            if (line.length >= 2) {
-                String header = line[0].trim();
-                log.debug("Header = " + header);
-                String value = line[1].trim();
-                log.debug("value = " + value);
-
-                if (header.equals("Variable_List")) {
-                    while (info[i + 1].startsWith("\t")) {
-                        value += info[i + 1];
-                        i++;
-                    }
-                    value = value.replaceAll("\t", "");
-                    jobDescriptor.setVariableList(value);
-                } else if ("Job Id".equals(header)) {
-                    jobDescriptor.setJobID(value);
-                } else if ("Job_Name".equals(header)) {
-                    jobDescriptor.setJobName(value);
-                } else if ("Account_Name".equals(header)) {
-                    jobDescriptor.setAcountString(value);
-                } else if ("job_state".equals(header)) {
-                    jobDescriptor.setStatus(value);
-                } else if ("Job_Owner".equals(header)) {
-                    jobDescriptor.setOwner(value);
-                } else if ("resources_used.cput".equals(header)) {
-                    jobDescriptor.setUsedCPUTime(value);
-                } else if ("resources_used.mem".equals(header)) {
-                    jobDescriptor.setUsedMemory(value);
-                } else if ("resources_used.walltime".equals(header)) {
-                    jobDescriptor.setEllapsedTime(value);
-                } else if ("job_state".equals(header)) {
-                    jobDescriptor.setStatus(value);
-                } else if ("queue".equals(header))
-                    jobDescriptor.setQueueName(value);
-                else if ("ctime".equals(header)) {
-                    jobDescriptor.setCTime(value);
-                } else if ("qtime".equals(header)) {
-                    jobDescriptor.setQTime(value);
-                } else if ("mtime".equals(header)) {
-                    jobDescriptor.setMTime(value);
-                } else if ("start_time".equals(header)) {
-                    jobDescriptor.setSTime(value);
-                } else if ("comp_time".equals(header)) {
-                    jobDescriptor.setCompTime(value);
-                } else if ("exec_host".equals(header)) {
-                    jobDescriptor.setExecuteNode(value);
-                } else if ("Output_Path".equals(header)) {
-                    if (info[i + 1].contains("=") || info[i + 1].contains(":"))
-                        jobDescriptor.setStandardOutFile(value);
-                    else {
-                        jobDescriptor.setStandardOutFile(value + info[i + 1].trim());
-                        i++;
-                    }
-                } else if ("Error_Path".equals(header)) {
-                    if (info[i + 1].contains("=") || info[i + 1].contains(":"))
-                        jobDescriptor.setStandardErrorFile(value);
-                    else {
-                        String st = info[i + 1].trim();
-                        jobDescriptor.setStandardErrorFile(value + st);
-                        i++;
-                    }
-
-                } else if ("submit_args".equals(header)) {
-                    while (i + 1 < info.length) {
-                        if (info[i + 1].startsWith("\t")) {
-                            value += info[i + 1];
-                            i++;
-                        } else
-                            break;
-                    }
-                    value = value.replaceAll("\t", "");
-                    jobDescriptor.setSubmitArgs(value);
-                }
-            }
-        }
-    }
-
-    public String parseJobSubmission(String rawOutput) {
-        log.debug(rawOutput);
-        return rawOutput;  //In PBS stdout is going to be directly the jobID
-    }
-
-    public JobStatus parseJobStatus(String jobID, String rawOutput) {
-        boolean jobFound = false;
-        log.debug(rawOutput);
-        String[] info = rawOutput.split("\n");
-        String[] line = null;
-        int index = 0;
-        for (String anInfo : info) {
-            index++;
-            if (anInfo.contains("Job Id:")) {
-                if (anInfo.contains(jobID)) {
-                    jobFound = true;
-                    break;
-                }
-            }
-        }
-        if (jobFound) {
-            for (int i=index;i<info.length;i++) {
-                String anInfo = info[i];
-                if (anInfo.contains("=")) {
-                    line = anInfo.split("=", 2);
-                    if (line.length != 0) {
-                        if (line[0].contains("job_state")) {
-                            return JobStatus.valueOf(line[1].replaceAll(" ", ""));
-                        }
-                    }
-                }
-            }
-        }
-        return null;
-    }
-
-    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) {
-        log.debug(rawOutput);
-        String[]    info = rawOutput.split("\n");
-//        int lastStop = 0;
-        for (String jobID : statusMap.keySet()) {
-            String jobName = jobID.split(",")[1];
-            boolean found = false;
-            for (int i = 0; i < info.length; i++) {
-                if (info[i].contains(jobName.substring(0,8))) {
-                    // now starts processing this line
-                    log.info(info[i]);
-                    String correctLine = info[i];
-                    String[] columns = correctLine.split(" ");
-                    List<String> columnList = new ArrayList<String>();
-                    for (String s : columns) {
-                        if (!"".equals(s)) {
-                            columnList.add(s);
-                        }
-                    }
-//                    lastStop = i + 1;
-                    try {
-                        statusMap.put(jobID, JobStatus.valueOf(columnList.get(9)));
-                    }catch(IndexOutOfBoundsException e){
-                        statusMap.put(jobID, JobStatus.valueOf("U"));
-                    }
-                    found = true;
-                    break;
-                }
-            }
-            if (!found)
-                log.error("Couldn't find the status of the job with JobName: " + jobName + ", Job Id: " + jobID.split(",")[0]);
-        }
-    }
-
-    @Override
-    public String parseJobId(String jobName, String rawOutput) throws SSHApiException {
-        /* output will look like
-        Job Id: 2080802.gordon-fe2.local
-            Job_Name = A312402627
-        */
-        String regJobId = "jobId";
-        Pattern pattern = Pattern.compile("(?<" + regJobId + ">[^\\s]*)\\s*.* " + jobName);
-        if (rawOutput != null) {
-            Matcher matcher = pattern.matcher(rawOutput);
-            if (matcher.find()) {
-                return matcher.group(regJobId);
-            } else {
-                log.error("No match is found for JobName");
-                return null;
-            }
-        } else {
-            log.error("Error: RawOutput shouldn't be null");
-            return null;
-        }
-    }
-
-
-}
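
For orientation, the job_state scan performed by parseJobStatus above boils down to the standalone sketch below. The qstat -f fragment is fabricated (it mirrors the sample quoted in the parseJobId comment) and the class name is invented.

public class PbsJobStateSketch {
    public static void main(String[] args) {
        // Fabricated fragment of `qstat -f` output
        String rawOutput =
                "Job Id: 2080802.gordon-fe2.local\n" +
                "    Job_Name = A312402627\n" +
                "    job_state = R\n" +
                "    queue = normal\n";
        String state = null;
        for (String line : rawOutput.split("\n")) {
            if (line.contains("job_state") && line.contains("=")) {
                state = line.split("=", 2)[1].trim();   // right-hand side of "job_state = R"
                break;
            }
        }
        System.out.println("job_state = " + state);     // prints: job_state = R
    }
}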

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/SlurmJobConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/SlurmJobConfiguration.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/SlurmJobConfiguration.java
deleted file mode 100644
index 48ba48e..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/SlurmJobConfiguration.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api.job;
-
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
-import org.apache.commons.io.FilenameUtils;
-
-import java.io.File;
-
-public class SlurmJobConfiguration implements JobManagerConfiguration {
-
-    private String jobDescriptionTemplateName;
-
-    private String scriptExtension;
-
-    private String installedPath;
-
-    private OutputParser parser;
-
-    public SlurmJobConfiguration(){
-        // this can be used to construct and use setter methods to set all the params in order
-    }
-    public SlurmJobConfiguration(String jobDescriptionTemplateName,
-                                   String scriptExtension,String installedPath,OutputParser parser) {
-        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
-        this.scriptExtension = scriptExtension;
-        this.parser = parser;
-        if (installedPath.endsWith("/")) {
-            this.installedPath = installedPath;
-        } else {
-            this.installedPath = installedPath + "/";
-        }
-    }
-
-    public RawCommandInfo getCancelCommand(String jobID) {
-        return new RawCommandInfo(this.installedPath + "scancel " + jobID);
-    }
-
-    public String getJobDescriptionTemplateName() {
-        return jobDescriptionTemplateName;
-    }
-
-    public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
-        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
-    }
-
-    public RawCommandInfo getMonitorCommand(String jobID) {
-        return new RawCommandInfo(this.installedPath + "squeue -j " + jobID);
-    }
-
-    public String getScriptExtension() {
-        return scriptExtension;
-    }
-
-    public RawCommandInfo getSubmitCommand(String workingDirectory,String pbsFilePath) {
-          return new RawCommandInfo(this.installedPath + "sbatch " +
-                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
-    }
-
-    public String getInstalledPath() {
-        return installedPath;
-    }
-
-    public void setInstalledPath(String installedPath) {
-        this.installedPath = installedPath;
-    }
-
-    public OutputParser getParser() {
-        return parser;
-    }
-
-    public void setParser(OutputParser parser) {
-        this.parser = parser;
-    }
-
-    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
-        return new RawCommandInfo(this.installedPath + "squeue -u " + userName);
-    }
-
-    @Override
-    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
-        return new RawCommandInfo(this.installedPath + "squeue -n " + jobName + " -u " + userName);
-    }
-
-    @Override
-    public String getBaseCancelCommand() {
-        return "scancel";
-    }
-
-    @Override
-    public String getBaseMonitorCommand() {
-        return "squeue";
-    }
-
-    @Override
-    public String getBaseSubmitCommand() {
-        return "sbatch";
-    }
-}
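
One small detail shared by these *JobConfiguration constructors is the trailing-slash normalization of installedPath, which keeps concatenations such as installedPath + "sbatch " well-formed. A minimal sketch follows; the class name and paths are invented for illustration.

public class InstalledPathSketch {
    // Same normalization the constructors above apply to installedPath
    static String normalize(String installedPath) {
        return installedPath.endsWith("/") ? installedPath : installedPath + "/";
    }

    public static void main(String[] args) {
        System.out.println(normalize("/usr/local/slurm/bin") + "sbatch job.slurm");   // /usr/local/slurm/bin/sbatch job.slurm
        System.out.println(normalize("/usr/local/slurm/bin/") + "sbatch job.slurm");  // same result
    }
}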

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/SlurmOutputParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/SlurmOutputParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/SlurmOutputParser.java
deleted file mode 100644
index 3d2dc48..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/SlurmOutputParser.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api.job;
-
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class SlurmOutputParser implements OutputParser {
-    private static final Logger log = LoggerFactory.getLogger(SlurmOutputParser.class);
-    public static final int JOB_NAME_OUTPUT_LENGTH = 8;
-    public static final String STATUS = "status";
-
-    public void parseSingleJob(JobDescriptor descriptor, String rawOutput) throws SSHApiException {
-        log.info(rawOutput);
-        String[] info = rawOutput.split("\n");
-        String lastString = info[info.length - 1];
-        if (lastString.contains("JOB ID")) {
-            // because there's no state
-            descriptor.setStatus("U");
-        } else {
-            int column = 0;
-            System.out.println(lastString);
-            for (String each : lastString.split(" ")) {
-                if (each.trim().isEmpty()) {
-                    continue;
-                } else {
-                    switch (column) {
-                        case 0:
-                            descriptor.setJobID(each);
-                            column++;
-                            break;
-                        case 1:
-                            descriptor.setPartition(each);
-                            column++;
-                            break;
-                        case 2:
-                            descriptor.setJobName(each);
-                            column++;
-                            break;
-                        case 3:
-                            descriptor.setUserName(each);
-                            column++;
-                            break;
-                        case 4:
-                            descriptor.setStatus(each);
-                            column++;
-                            break;
-                        case 5:
-                            descriptor.setUsedCPUTime(each);
-                            column++;
-                            break;
-                        case 6:
-                            try {
-                                int nodes = Integer.parseInt(each);
-                                descriptor.setNodes(nodes);
-                            }catch (Exception e){
-                                log.error("Node count read from command output is not an integer !!!");
-                            }
-                            column++;
-                            break;
-                        case 7:
-                            descriptor.setNodeList(each);
-                            column++;
-                            break;
-                    }
-                }
-            }
-        }
-
-    }
-
-    /**
-     * This can be used to parse the output of sbatch and extract the jobID from the content.
-     *
-     * @param rawOutput
-     * @return
-     */
-    public String parseJobSubmission(String rawOutput) throws SSHApiException {
-        // FIXME : use regex to match correct jobId;
-        log.info(rawOutput);
-        String[] info = rawOutput.split("\n");
-        for (String anInfo : info) {
-            if (anInfo.contains("Submitted batch job")) {
-                String[] split = anInfo.split("Submitted batch job");
-                return split[1].trim();
-            }
-        }
-        return "";
-//        throw new SSHApiException(rawOutput);  // todo
-    }
-
-    public JobStatus parseJobStatus(String jobID, String rawOutput) throws SSHApiException {
-        log.info(rawOutput);
-        Pattern pattern = Pattern.compile(jobID + "(?=\\s+\\S+\\s+\\S+\\s+\\S+\\s+(?<" + STATUS + ">\\w+))");
-        Matcher matcher = pattern.matcher(rawOutput);
-        if (matcher.find()) {
-            return JobStatus.valueOf(matcher.group(STATUS));
-        }
-        return null;
-    }
-
-    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws SSHApiException {
-        log.debug(rawOutput);
-        String[] info = rawOutput.split("\n");
-        String lastString = info[info.length - 1];
-        if (lastString.contains("JOBID") || lastString.contains("PARTITION")) {
-            log.info("There are no jobs with this username ... ");
-            return;
-        }
-//        int lastStop = 0;
-        for (String jobID : statusMap.keySet()) {
-            String jobId = jobID.split(",")[0];
-            String jobName = jobID.split(",")[1];
-            boolean found = false;
-            for (int i = 0; i < info.length; i++) {
-                if (info[i].contains(jobName.substring(0, 8))) {
-                    // now starts processing this line
-                    log.info(info[i]);
-                    String correctLine = info[i];
-                    String[] columns = correctLine.split(" ");
-                    List<String> columnList = new ArrayList<String>();
-                    for (String s : columns) {
-                        if (!"".equals(s)) {
-                            columnList.add(s);
-                        }
-                    }
-                    try {
-                        statusMap.put(jobID, JobStatus.valueOf(columnList.get(4)));
-                    } catch (IndexOutOfBoundsException e) {
-                        statusMap.put(jobID, JobStatus.valueOf("U"));
-                    }
-                    found = true;
-                    break;
-                }
-            }
-            if (!found) {
-                log.error("Couldn't find the status of the Job with JobName: " + jobName + "Job Id: " + jobId);
-            }
-        }
-    }
-
-    @Override
-    public String parseJobId(String jobName, String rawOutput) throws SSHApiException {
-        String regJobId = "jobId";
-        if (jobName == null) {
-            return null;
-        } else if(jobName.length() > JOB_NAME_OUTPUT_LENGTH) {
-            jobName = jobName.substring(0, JOB_NAME_OUTPUT_LENGTH);
-        }
-        Pattern pattern = Pattern.compile("(?=(?<" + regJobId + ">\\d+)\\s+\\w+\\s+" + jobName + ")"); // regex - look ahead and match
-        if (rawOutput != null) {
-            Matcher matcher = pattern.matcher(rawOutput);
-            if (matcher.find()) {
-                return matcher.group(regJobId);
-            } else {
-                log.error("No match is found for JobName");
-                return null;
-            }
-        } else {
-            log.error("Error: RawOutput shouldn't be null");
-            return null;
-        }
-    }
-}
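
To make the two Slurm parsing steps above concrete, here is a self-contained sketch using fabricated sbatch and squeue output; the job ID, partition and user names are invented, and the class name is hypothetical.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SlurmParseSketch {
    public static void main(String[] args) {
        // 1. parseJobSubmission: pull the job ID out of sbatch's acknowledgement line
        String sbatchOut = "Submitted batch job 2477983";
        String jobId = sbatchOut.split("Submitted batch job")[1].trim();
        System.out.println("job id = " + jobId);                          // 2477983

        // 2. parseJobStatus: read the state column back from squeue output
        String squeueOut =
                "JOBID PARTITION     NAME     USER ST  TIME NODES NODELIST(REASON)\n" +
                "2477983      long A3124026    lg11w  R  0:58     1 c11b02";
        Pattern p = Pattern.compile(jobId + "(?=\\s+\\S+\\s+\\S+\\s+\\S+\\s+(?<status>\\w+))");
        Matcher m = p.matcher(squeueOut);
        if (m.find()) {
            System.out.println("state = " + m.group("status"));           // R
        }
    }
}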

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/UGEJobConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/UGEJobConfiguration.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/UGEJobConfiguration.java
deleted file mode 100644
index fddf210..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/UGEJobConfiguration.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api.job;
-
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
-import org.apache.commons.io.FilenameUtils;
-
-import java.io.File;
-
-public class UGEJobConfiguration implements JobManagerConfiguration {
-
-    private String jobDescriptionTemplateName;
-
-    private String scriptExtension;
-
-    private String installedPath;
-
-    private OutputParser parser;
-
-    public UGEJobConfiguration() {
-        // this can be used to construct and use setter methods to set all the params in order
-    }
-
-    public UGEJobConfiguration(String jobDescriptionTemplateName,
-                               String scriptExtension, String installedPath, OutputParser parser) {
-        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
-        this.scriptExtension = scriptExtension;
-        this.parser = parser;
-        if (installedPath.endsWith("/")) {
-            this.installedPath = installedPath;
-        } else {
-            this.installedPath = installedPath + "/";
-        }
-    }
-
-    public RawCommandInfo getCancelCommand(String jobID) {
-        return new RawCommandInfo(this.installedPath + "qdel " + jobID);
-    }
-
-    public String getJobDescriptionTemplateName() {
-        return jobDescriptionTemplateName;
-    }
-
-    public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
-        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
-    }
-
-    public RawCommandInfo getMonitorCommand(String jobID) {
-        return new RawCommandInfo(this.installedPath + "qstat -j " + jobID);
-    }
-
-    public String getScriptExtension() {
-        return scriptExtension;
-    }
-
-    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
-        return new RawCommandInfo(this.installedPath + "qsub " +
-                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
-    }
-
-    public String getInstalledPath() {
-        return installedPath;
-    }
-
-    public void setInstalledPath(String installedPath) {
-        this.installedPath = installedPath;
-    }
-
-    public OutputParser getParser() {
-        return parser;
-    }
-
-    public void setParser(OutputParser parser) {
-        this.parser = parser;
-    }
-
-    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
-        return new RawCommandInfo(this.installedPath + "qstat -u " + userName);
-    }
-
-    @Override
-    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
-        // There is no option here to get jobDetails by JobName, so we search with userName
-        return new RawCommandInfo(this.installedPath + "qstat -u " + userName);
-    }
-
-    @Override
-    public String  getBaseCancelCommand() {
-        return "qdel";
-    }
-
-    @Override
-    public String  getBaseMonitorCommand() {
-        return "qstat";
-    }
-
-    @Override
-    public String getBaseSubmitCommand() {
-        return "qsub ";
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/UGEOutputParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/UGEOutputParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/UGEOutputParser.java
deleted file mode 100644
index 3419b3a..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/api/job/UGEOutputParser.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.api.job;
-
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class UGEOutputParser implements OutputParser {
-    private static final Logger log = LoggerFactory.getLogger(UGEOutputParser.class);
-    public static final String JOB_ID = "jobId";
-
-    public void parseSingleJob(JobDescriptor jobDescriptor, String rawOutput) {
-        log.debug(rawOutput);
-        String[] info = rawOutput.split("\n");
-        String[] line;
-        for (int i = 0; i < info.length; i++) {
-            if (info[i].contains("=")) {
-                line = info[i].split("=", 2);
-            } else {
-                line = info[i].split(":", 2);
-            }
-            if (line.length >= 2) {
-                String header = line[0].trim();
-                log.debug("Header = " + header);
-                String value = line[1].trim();
-                log.debug("value = " + value);
-
-                if (header.equals("Variable_List")) {
-                    while (info[i + 1].startsWith("\t")) {
-                        value += info[i + 1];
-                        i++;
-                    }
-                    value = value.replaceAll("\t", "");
-                    jobDescriptor.setVariableList(value);
-                } else if ("Job Id".equals(header)) {
-                    jobDescriptor.setJobID(value);
-                } else if ("Job_Name".equals(header)) {
-                    jobDescriptor.setJobName(value);
-                } else if ("Account_Name".equals(header)) {
-                    jobDescriptor.setAcountString(value);
-                } else if ("job_state".equals(header)) {
-                    jobDescriptor.setStatus(value);
-                } else if ("Job_Owner".equals(header)) {
-                    jobDescriptor.setOwner(value);
-                } else if ("resources_used.cput".equals(header)) {
-                    jobDescriptor.setUsedCPUTime(value);
-                } else if ("resources_used.mem".equals(header)) {
-                    jobDescriptor.setUsedMemory(value);
-                } else if ("resources_used.walltime".equals(header)) {
-                    jobDescriptor.setEllapsedTime(value);
-                } else if ("job_state".equals(header)) {
-                    jobDescriptor.setStatus(value);
-                } else if ("queue".equals(header))
-                    jobDescriptor.setQueueName(value);
-                else if ("ctime".equals(header)) {
-                    jobDescriptor.setCTime(value);
-                } else if ("qtime".equals(header)) {
-                    jobDescriptor.setQTime(value);
-                } else if ("mtime".equals(header)) {
-                    jobDescriptor.setMTime(value);
-                } else if ("start_time".equals(header)) {
-                    jobDescriptor.setSTime(value);
-                } else if ("comp_time".equals(header)) {
-                    jobDescriptor.setCompTime(value);
-                } else if ("exec_host".equals(header)) {
-                    jobDescriptor.setExecuteNode(value);
-                } else if ("Output_Path".equals(header)) {
-                    if (info[i + 1].contains("=") || info[i + 1].contains(":"))
-                        jobDescriptor.setStandardOutFile(value);
-                    else {
-                        jobDescriptor.setStandardOutFile(value + info[i + 1].trim());
-                        i++;
-                    }
-                } else if ("Error_Path".equals(header)) {
-                    if (info[i + 1].contains("=") || info[i + 1].contains(":"))
-                        jobDescriptor.setStandardErrorFile(value);
-                    else {
-                        String st = info[i + 1].trim();
-                        jobDescriptor.setStandardErrorFile(value + st);
-                        i++;
-                    }
-
-                } else if ("submit_args".equals(header)) {
-                    while (i + 1 < info.length) {
-                        if (info[i + 1].startsWith("\t")) {
-                            value += info[i + 1];
-                            i++;
-                        } else
-                            break;
-                    }
-                    value = value.replaceAll("\t", "");
-                    jobDescriptor.setSubmitArgs(value);
-                }
-            }
-        }
-    }
-
-    public String parseJobSubmission(String rawOutput) {
-        log.debug(rawOutput);
-        if (rawOutput != null && !rawOutput.isEmpty()) {
-            String[] info = rawOutput.split("\n");
-            String lastLine = info[info.length - 1];
-            return lastLine.split(" ")[2]; // For UGE the job ID is the third token of the "Your job <id> (...) has been submitted" line
-        } else {
-            return "";
-        }
-    }
-
-    public JobStatus parseJobStatus(String jobID, String rawOutput) {
-        Pattern pattern = Pattern.compile("job_number:[\\s]+" + jobID);
-        Matcher matcher = pattern.matcher(rawOutput);
-        if (matcher.find()) {
-            return JobStatus.Q; // fixme; return correct status.
-        }
-        return JobStatus.U;
-    }
-
-    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) {
-        log.debug(rawOutput);
-        String[] info = rawOutput.split("\n");
-        int lastStop = 0;
-        for (String jobID : statusMap.keySet()) {
-            for(int i=lastStop;i<info.length;i++){
-               if(jobID.split(",")[0].contains(info[i].split(" ")[0]) && !"".equals(info[i].split(" ")[0])){
-                   // now starts processing this line
-                   log.info(info[i]);
-                   String correctLine = info[i];
-                   String[] columns = correctLine.split(" ");
-                   List<String> columnList = new ArrayList<String>();
-                   for (String s : columns) {
-                       if (!"".equals(s)) {
-                           columnList.add(s);
-                       }
-                   }
-                   lastStop = i+1;
-                   if ("E".equals(columnList.get(4))) {
-                       // There is another status that also uses the letter E besides the error status,
-                       // so we tweak the job status slightly to disambiguate it
-                       columnList.set(4, "Er");
-                   }
-                   statusMap.put(jobID, JobStatus.valueOf(columnList.get(4)));
-                   break;
-               }
-            }
-        }
-    }
-
-    @Override
-    public String parseJobId(String jobName, String rawOutput) throws SSHApiException {
-        if (jobName.length() > 10) {
-            jobName = jobName.substring(0, 10);
-        }
-        Pattern pattern = Pattern.compile("(?<" + JOB_ID + ">\\S+)\\s+\\S+\\s+(" + jobName + ")");
-        Matcher matcher = pattern.matcher(rawOutput);
-        if (matcher.find()) {
-            return matcher.group(JOB_ID);
-        }
-        return null;
-    }
-
-
-}
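
For reference, the parseJobSubmission logic above can be checked against the usual Grid Engine acknowledgement format. The "Your job <id> (...) has been submitted" shape, the job ID and the script name below are assumed for illustration rather than taken from this change.

public class UgeSubmissionParseSketch {
    public static void main(String[] args) {
        // Assumed Grid Engine style acknowledgement line
        String rawOutput = "Your job 2233 (\"job.sh\") has been submitted";
        String[] info = rawOutput.split("\n");
        String lastLine = info[info.length - 1];
        // Third whitespace-separated token of the last line, as in UGEOutputParser.parseJobSubmission
        System.out.println("job id = " + lastLine.split(" ")[2]);   // prints: job id = 2233
    }
}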

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/config/ConfigReader.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/config/ConfigReader.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/config/ConfigReader.java
deleted file mode 100644
index c40059e..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/config/ConfigReader.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.config;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-/**
- * Reads basic configurations.
- */
-public class ConfigReader {
-
-    private static final String CONFIGURATION_FILE = "gsissh.properties";
-
-
-    private Properties properties;
-
-    /**
-     * Reads configurations from the class path configuration file.
-     * @throws IOException If an error occurred while reading configurations.
-     */
-    public ConfigReader() throws IOException {
-        this.properties = getPropertiesFromClasspath(CONFIGURATION_FILE);
-    }
-
-    private Properties getPropertiesFromClasspath(String propFileName) throws IOException {
-        Properties props = new Properties();
-        InputStream inputStream = this.getClass().getClassLoader()
-                .getResourceAsStream(propFileName);
-
-        if (inputStream == null) {
-            throw new FileNotFoundException("System configuration file '" + propFileName
-                    + "' not found in the classpath");
-        }
-
-        props.load(inputStream);
-
-        return props;
-    }
-
-    public String getConfiguration(String key) {
-        return this.properties.getProperty(key);
-    }
-
-
-    /**
-     * Gets all the SSH related properties used by JSch.
-     * @return All properties.
-     */
-    public Properties getProperties() {
-        return this.properties;
-    }
-
-
-}
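
The classpath lookup ConfigReader performs is plain java.util.Properties loading; a minimal standalone equivalent is sketched below. The class name is invented, and the property key shown is the ssh.session.timeout setting referenced elsewhere in this module.

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class ClasspathPropertiesSketch {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        // Same lookup ConfigReader does for gsissh.properties on the classpath
        try (InputStream in = ClasspathPropertiesSketch.class.getClassLoader()
                .getResourceAsStream("gsissh.properties")) {
            if (in == null) {
                throw new IOException("gsissh.properties not found on the classpath");
            }
            props.load(in);
        }
        System.out.println(props.getProperty("ssh.session.timeout"));
    }
}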


http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/DefaultJobSubmissionListener.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/DefaultJobSubmissionListener.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/DefaultJobSubmissionListener.java
deleted file mode 100644
index 18371b1..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/DefaultJobSubmissionListener.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.impl;
-
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-import org.apache.airavata.gfac.gsi.ssh.listener.JobSubmissionListener;
-
-public class DefaultJobSubmissionListener extends JobSubmissionListener {
-
-    public void statusChanged(JobDescriptor jobDescriptor) throws SSHApiException {
-        System.out.println("Job status has changed to : " + jobDescriptor.getStatus());
-    }
-
-    @Override
-    public void statusChanged(JobStatus jobStatus) throws SSHApiException {
-        System.out.println("Job status has changed to : " + jobStatus.toString());
-    }
-
-    @Override
-    public boolean isJobDone() throws SSHApiException {
-        return getJobStatus().equals(JobStatus.C);
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java
deleted file mode 100644
index 5f44843..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java
+++ /dev/null
@@ -1,777 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.impl;
-
-import com.jcraft.jsch.ExtendedSession;
-import com.jcraft.jsch.GSISSHIdentityFile;
-import com.jcraft.jsch.GSISSHIdentityRepository;
-import com.jcraft.jsch.Identity;
-import com.jcraft.jsch.JSch;
-import com.jcraft.jsch.JSchException;
-import com.jcraft.jsch.Session;
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gfac.core.authentication.SSHKeyAuthentication;
-import org.apache.airavata.gfac.core.authentication.SSHPasswordAuthentication;
-import org.apache.airavata.gfac.core.authentication.SSHPublicKeyAuthentication;
-import org.apache.airavata.gfac.core.authentication.SSHPublicKeyFileAuthentication;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
-import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.gsi.ssh.api.CommandExecutor;
-import org.apache.airavata.gfac.gsi.ssh.config.ConfigReader;
-import org.apache.airavata.gfac.gsi.ssh.jsch.ExtendedJSch;
-import org.apache.airavata.gfac.gsi.ssh.util.SSHAPIUIKeyboardInteractive;
-import org.apache.airavata.gfac.gsi.ssh.util.SSHKeyPasswordHandler;
-import org.apache.airavata.gfac.gsi.ssh.util.SSHUtils;
-import org.apache.airavata.model.status.JobStatus;
-import org.apache.commons.io.FileUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.xml.transform.Source;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerConfigurationException;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.stream.StreamResult;
-import javax.xml.transform.stream.StreamSource;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.StringWriter;
-import java.net.URL;
-import java.security.SecureRandom;
-import java.util.List;
-import java.util.Map;
-
-public class GSISSHAbstractCluster implements RemoteCluster {
-
-    private static final Logger log = LoggerFactory.getLogger(GSISSHAbstractCluster.class);
-    public static final String X509_CERT_DIR = "X509_CERT_DIR";
-    public static final String SSH_SESSION_TIMEOUT = "ssh.session.timeout";
-
-    public JobManagerConfiguration jobManagerConfiguration;
-
-    private ServerInfo serverInfo;
-
-    private AuthenticationInfo authenticationInfo;
-
-    private Session session;
-
-    private ConfigReader configReader;
-	
-    private JSch defaultJSch;
-
-    private static Identity identityFile = null;
-
-    public GSISSHAbstractCluster(ServerInfo serverInfo, AuthenticationInfo authenticationInfo, JobManagerConfiguration config) throws SSHApiException {
-        this(serverInfo, authenticationInfo);
-        this.jobManagerConfiguration = config;
-    }
-
-    public  GSISSHAbstractCluster(ServerInfo serverInfo, AuthenticationInfo authenticationInfo) throws SSHApiException {
-
-        reconnect(serverInfo, authenticationInfo);
-    }
-
-    public GSISSHAbstractCluster(JobManagerConfiguration config) {
-        this.jobManagerConfiguration = config;
-    }
-    private synchronized void reconnect(ServerInfo serverInfo, AuthenticationInfo authenticationInfo) throws SSHApiException {
-        this.serverInfo = serverInfo;
-
-        this.authenticationInfo = authenticationInfo;
-
-        if (authenticationInfo instanceof GSIAuthenticationInfo) {
-            JSch.setConfig("gssapi-with-mic.x509", "org.apache.airavata.gfac.ssh.GSSContextX509");
-            JSch.setConfig("userauth.gssapi-with-mic", "com.jcraft.jsch.UserAuthGSSAPIWithMICGSSCredentials");
-            System.setProperty(X509_CERT_DIR, (String) ((GSIAuthenticationInfo) authenticationInfo).getProperties().
-                    get("X509_CERT_DIR"));
-        }
-
-
-        try {
-            this.configReader = new ConfigReader();
-        } catch (IOException e) {
-            throw new SSHApiException("Unable to load system configurations.", e);
-        }
-        try {
-            if (defaultJSch == null) {
-                defaultJSch = createJSch(authenticationInfo);
-            }
-            log.debug("Connecting to server - " + serverInfo.getHost() + ":" + serverInfo.getPort() + " with user name - "
-                    + serverInfo.getUserName());
-
-            session = createSession(defaultJSch, serverInfo.getUserName(), serverInfo.getHost(), serverInfo.getPort());
-        } catch (Exception e) {
-            throw new SSHApiException("An exception occurred while creating SSH session." +
-                    "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-        //=============================================================
-        // Handling vanilla SSH pieces
-        //=============================================================
-        if (authenticationInfo instanceof SSHPasswordAuthentication) {
-            String password = ((SSHPasswordAuthentication) authenticationInfo).
-                    getPassword(serverInfo.getUserName(), serverInfo.getHost());
-
-            session.setUserInfo(new SSHAPIUIKeyboardInteractive(password));
-
-            // TODO figure out why we need to set password to session
-            session.setPassword(password);
-
-        } else if (authenticationInfo instanceof SSHPublicKeyFileAuthentication) {
-
-            SSHPublicKeyFileAuthentication sshPublicKeyFileAuthentication
-                    = (SSHPublicKeyFileAuthentication) authenticationInfo;
-            String privateKeyFile = sshPublicKeyFileAuthentication.
-                    getPrivateKeyFile(serverInfo.getUserName(), serverInfo.getHost());
-
-            logDebug("The private key file for vanilla SSH " + privateKeyFile);
-
-            String publicKeyFile = sshPublicKeyFileAuthentication.
-                    getPublicKeyFile(serverInfo.getUserName(), serverInfo.getHost());
-
-            logDebug("The public key file for vanilla SSH " + publicKeyFile);
-
-            try {
-                identityFile = GSISSHIdentityFile.newInstance(privateKeyFile, null, defaultJSch);
-            } catch (JSchException e) {
-                throw new SSHApiException("An exception occurred while initializing keys using files " +
-                        "(private key and public key). " +
-                        "Connecting to server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                        " with user name - "
-                        + serverInfo.getUserName() + ", private key file - " + privateKeyFile + ", public key file - " +
-                        publicKeyFile, e);
-            }
-
-            // Add identity to identity repository
-            GSISSHIdentityRepository identityRepository = new GSISSHIdentityRepository(defaultJSch);
-            identityRepository.add(identityFile);
-
-            // Set repository to session
-            session.setIdentityRepository(identityRepository);
-
-            // Set the user info
-            SSHKeyPasswordHandler sshKeyPasswordHandler
-                    = new SSHKeyPasswordHandler((SSHKeyAuthentication) authenticationInfo);
-
-            session.setUserInfo(sshKeyPasswordHandler);
-
-        } else if (authenticationInfo instanceof SSHPublicKeyAuthentication) {
-
-            SSHPublicKeyAuthentication sshPublicKeyAuthentication
-                    = (SSHPublicKeyAuthentication) authenticationInfo;
-            try {
-                String name = serverInfo.getUserName() + "_" + serverInfo.getHost();
-                identityFile = GSISSHIdentityFile.newInstance(name,
-                        sshPublicKeyAuthentication.getPrivateKey(serverInfo.getUserName(), serverInfo.getHost()),
-                        sshPublicKeyAuthentication.getPublicKey(serverInfo.getUserName(), serverInfo.getHost()), defaultJSch);
-            } catch (JSchException e) {
-                throw new SSHApiException("An exception occurred while initializing keys using byte arrays " +
-                        "(private key and public key). " +
-                        "Connecting to server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                        " with user name - "
-                        + serverInfo.getUserName(), e);
-            }
-
-            // Add identity to identity repository
-            GSISSHIdentityRepository identityRepository = new GSISSHIdentityRepository(defaultJSch);
-            identityRepository.add(identityFile);
-
-            // Set repository to session
-            session.setIdentityRepository(identityRepository);
-
-            // Set the user info
-            SSHKeyPasswordHandler sshKeyPasswordHandler
-                    = new SSHKeyPasswordHandler((SSHKeyAuthentication) authenticationInfo);
-
-            session.setUserInfo(sshKeyPasswordHandler);
-
-        }
-
-        // Not a good way, but we don't have any choice
-        if (session instanceof ExtendedSession) {
-            if (authenticationInfo instanceof GSIAuthenticationInfo) {
-                ((ExtendedSession) session).setAuthenticationInfo((GSIAuthenticationInfo) authenticationInfo);
-            }
-        }
-
-        try {
-            session.connect(Integer.parseInt(configReader.getConfiguration(SSH_SESSION_TIMEOUT)));
-        } catch (Exception e) {
-            throw new SSHApiException("An exception occurred while connecting to the server. " +
-                    "Connecting to server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " with user name - "
-                    + serverInfo.getUserName(), e);
-        }
-    }
-
-    public synchronized boolean cancelJob(String jobID) throws SSHApiException {
-        JobStatus jobStatus = getJobStatus(jobID);
-        if (jobStatus == null || jobStatus == JobStatus.U) {
-            log.info("Validation before cancel failed; couldn't find the job on the remote host to cancel. The job may already be completed, failed, or cancelled.");
-            return false;
-        }
-        RawCommandInfo rawCommandInfo = jobManagerConfiguration.getCancelCommand(jobID);
-
-        StandardOutReader stdOutReader = new StandardOutReader();
-        log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-        CommandExecutor.executeCommand(rawCommandInfo, this.getSession(), stdOutReader);
-        String outputifAvailable = getOutputifAvailable(stdOutReader, "Error reading output of job submission", jobManagerConfiguration.getBaseCancelCommand());
-        // this might not be the case for all the resources; if so, the Cluster implementation can override this method
-        // because here, after cancelling, we try to get the job description and return it back
-	    return true;
-    }
-
-    public synchronized String submitBatchJob(String scriptPath, String workingDirectory) throws SSHApiException {
-        this.scpTo(workingDirectory, scriptPath);
-
-        // since this is a constant we do not ask users to fill this
-
-//        RawCommandInfo rawCommandInfo = new RawCommandInfo(this.installedPath + this.jobManagerConfiguration.getSubmitCommand() + " " +
-//                workingDirectory + File.separator + FilenameUtils.getName(scriptPath));
-
-        RawCommandInfo rawCommandInfo = jobManagerConfiguration.getSubmitCommand(workingDirectory,scriptPath);
-        StandardOutReader standardOutReader = new StandardOutReader();
-        log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-        CommandExecutor.executeCommand(rawCommandInfo, this.session, standardOutReader);
-
-        //Check whether the PBS submission was successful or not; if it failed, throw an exception from the submitJob method
-        // with the error reported by the qsub command
-        //
-        String outputifAvailable = getOutputifAvailable(standardOutReader,"Error reading output of job submission",jobManagerConfiguration.getBaseSubmitCommand());
-        OutputParser outputParser = jobManagerConfiguration.getParser();
-        return  outputParser.parseJobSubmission(outputifAvailable);
-    }
-
-    public void generateJobScript(JobDescriptor jobDescriptor) throws SSHApiException {
-        TransformerFactory factory = TransformerFactory.newInstance();
-        URL resource = this.getClass().getClassLoader().getResource(jobManagerConfiguration.getJobDescriptionTemplateName());
-
-        if (resource == null) {
-            String error = "System configuration file '" + jobManagerConfiguration.getJobDescriptionTemplateName()
-                    + "' not found in the classpath";
-            throw new SSHApiException(error);
-        }
-
-        Source xslt = new StreamSource(new File(resource.getPath()));
-        Transformer transformer;
-        StringWriter results = new StringWriter();
-        File tempPBSFile = null;
-        try {
-            // generate the pbs script using xslt
-            transformer = factory.newTransformer(xslt);
-            Source text = new StreamSource(new ByteArrayInputStream(jobDescriptor.toXML().getBytes()));
-            transformer.transform(text, new StreamResult(results));
-            String scriptContent = results.toString().replaceAll("^[ |\t]*\n$", "");
-            if (scriptContent.startsWith("\n")) {
-                scriptContent = scriptContent.substring(1);
-            }
-//            log.debug("generated PBS:" + results.toString());
-
-            // creating a temporary file using pbs script generated above
-            int number = new SecureRandom().nextInt();
-            number = (number < 0 ? -number : number);
-
-            tempPBSFile = new File(Integer.toString(number) + jobManagerConfiguration.getScriptExtension());
-            log.info("File Path: " + tempPBSFile.getAbsolutePath());
-            log.info("File Content: " + scriptContent);
-            FileUtils.writeStringToFile(tempPBSFile, scriptContent);
-        } catch (TransformerConfigurationException e) {
-            throw new SSHApiException("Error parsing PBS transformation", e);
-        } catch (TransformerException e) {
-            throw new SSHApiException("Error generating PBS script", e);
-        } catch (IOException e) {
-            throw new SSHApiException("An exception occurred while writing the generated job script to a temporary file " +
-                    "for server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " and user name - "
-                    + serverInfo.getUserName(), e);
-        } finally {
-            if (tempPBSFile != null) {
-                tempPBSFile.delete();
-            }
-        }
-    }
-
-
-
-    public synchronized JobDescriptor getJobDescriptorById(String jobID) throws SSHApiException {
-        RawCommandInfo rawCommandInfo = jobManagerConfiguration.getMonitorCommand(jobID);
-        StandardOutReader stdOutReader = new StandardOutReader();
-        log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-        CommandExecutor.executeCommand(rawCommandInfo, this.getSession(), stdOutReader);
-        String result = getOutputifAvailable(stdOutReader, "Error getting job information from the resource !",jobManagerConfiguration.getBaseMonitorCommand());
-        JobDescriptor jobDescriptor = new JobDescriptor();
-        jobManagerConfiguration.getParser().parseSingleJob(jobDescriptor, result);
-        return jobDescriptor;
-    }
-
-    public synchronized JobStatus getJobStatus(String jobID) throws SSHApiException {
-        RawCommandInfo rawCommandInfo = jobManagerConfiguration.getMonitorCommand(jobID);
-        StandardOutReader stdOutReader = new StandardOutReader();
-        log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-        CommandExecutor.executeCommand(rawCommandInfo, this.getSession(), stdOutReader);
-        String result = getOutputifAvailable(stdOutReader, "Error getting job information from the resource !", jobManagerConfiguration.getBaseMonitorCommand());
-        return jobManagerConfiguration.getParser().parseJobStatus(jobID, result);
-    }
-
-    @Override
-    public String getJobIdByJobName(String jobName, String userName) throws SSHApiException {
-        RawCommandInfo rawCommandInfo = jobManagerConfiguration.getJobIdMonitorCommand(jobName, userName);
-        StandardOutReader stdOutReader = new StandardOutReader();
-        log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-        CommandExecutor.executeCommand(rawCommandInfo, this.getSession(), stdOutReader);
-        String result = getOutputifAvailable(stdOutReader, "Error getting job information from the resource !",
-                jobManagerConfiguration.getJobIdMonitorCommand(jobName,userName).getCommand());
-        return jobManagerConfiguration.getParser().parseJobId(jobName, result);
-    }
-
-    private static void logDebug(String message) {
-        if (log.isDebugEnabled()) {
-            log.debug(message);
-        }
-    }
-
-    public JobManagerConfiguration getJobManagerConfiguration() {
-        return jobManagerConfiguration;
-    }
-
-    public void setJobManagerConfiguration(JobManagerConfiguration jobManagerConfiguration) {
-        this.jobManagerConfiguration = jobManagerConfiguration;
-    }
-
-    public synchronized void scpTo(String remoteFile, String localFile) throws SSHApiException {
-        int retry = 3;
-        while (retry > 0) {
-            try {
-                if (!session.isConnected()) {
-                    session.connect();
-                }
-                log.info("Transferring file:/" + localFile + " To:" + serverInfo.getHost() + ":" + remoteFile);
-                SSHUtils.scpTo(remoteFile, localFile, session);
-                retry = 0;
-            } catch (IOException e) {
-                retry--;
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to scp local file: " + localFile + " to remote file "
-                            + serverInfo.getHost() + ":" + remoteFile, e);
-                }
-            } catch (JSchException e) {
-                retry--;
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e1) {
-                    log.error(e1.getMessage(), e1);
-                }
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to scp local file: " + localFile + " to remote file "
-                            + serverInfo.getHost() + ":" + remoteFile, e);
-                }
-            }
-        }
-    }
-
-    public synchronized void scpFrom(String remoteFile, String localFile) throws SSHApiException {
-        int retry = 3;
-        while(retry>0) {
-            try {
-                if (!session.isConnected()) {
-                    session.connect();
-                }
-                log.info("Transferring from:" + serverInfo.getHost() + ":" + remoteFile + " To:" + "file:/" + localFile);
-                SSHUtils.scpFrom(remoteFile, localFile, session);
-                retry=0;
-            } catch (IOException e) {
-                retry--;
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e1) {
-                    log.error(e1.getMessage(), e1);
-                }
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to scp remote file " + serverInfo.getHost() + ":" + remoteFile
-                            + " to local file: " + localFile, e);
-                }else{
-                    log.error("Error performing scp but doing a retry");
-                }
-            } catch (JSchException e) {
-                retry--;
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e1) {
-                    log.error(e1.getMessage(), e1);
-                }
-                reconnect(serverInfo, authenticationInfo);
-                if(retry==0) {
-                    throw new SSHApiException("Failed to scp remote file " + serverInfo.getHost() + ":" + remoteFile
-                            + " to local file: " + localFile, e);
-                }else{
-                    log.error("Error performing scp but doing a retry");
-                }
-            }
-        }
-    }
-    
-    public synchronized void scpThirdParty(String remoteFileSource, String remoteFileTarget) throws SSHApiException {
-        try {
-            if(!session.isConnected()){
-                session.connect();
-            }
-            log.info("Transferring from:" + remoteFileSource + " To: " + remoteFileTarget);
-            SSHUtils.scpThirdParty(remoteFileSource, remoteFileTarget, session);
-        } catch (IOException e) {
-            throw new SSHApiException("Failed to scp file: " + remoteFileSource + " to remote file "
-                    + remoteFileTarget, e);
-        } catch (JSchException e) {
-            throw new SSHApiException("Failed to scp file: " + remoteFileSource + " to remote file "
-                    + remoteFileTarget, e);
-        }
-    }
-
-    public synchronized void makeDirectory(String directoryPath) throws SSHApiException {
-        int retry = 3;
-        while (retry > 0) {
-            try {
-                if (!session.isConnected()) {
-                    session.connect();
-                }
-                log.info("Creating directory: " + serverInfo.getHost() + ":" + directoryPath);
-                SSHUtils.makeDirectory(directoryPath, session);
-                retry = 0;
-            } catch (IOException e) {
-                throw new SSHApiException("Failed to create directory " + directoryPath + " on remote host "
-                        + serverInfo.getHost(), e);
-            } catch (JSchException e) {
-                retry--;
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e1) {
-                    log.error(e1.getMessage(), e1);
-                }
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to create directory " + directoryPath + " on remote host "
-                            + serverInfo.getHost(), e);
-                }
-            } catch (SSHApiException e) {
-                retry--;
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e1) {
-                    log.error(e1.getMessage(), e1);
-                }
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to create directory " + directoryPath + " on remote host "
-                            + serverInfo.getHost(), e);
-                }
-            }
-        }
-    }
-
-    public synchronized List<String> listDirectory(String directoryPath) throws SSHApiException {
-        int retry = 3;
-        List<String> files = null;
-        while (retry > 0) {
-            try {
-                if (!session.isConnected()) {
-                    session.connect();
-                }
-                log.info("Listing directory: " + serverInfo.getHost() + ":" + directoryPath);
-                files = SSHUtils.listDirectory(directoryPath, session);
-                retry=0;
-            } catch (IOException e) {
-                log.error(e.getMessage(), e);
-                retry--;
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e1) {
-                    log.error(e1.getMessage(), e1);
-                }
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to list directory " + directoryPath + " on the remote host", e);
-                }
-            } catch (JSchException e) {
-                retry--;
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to list directory " + directoryPath + " on the remote host", e);
-                }
-            }catch (SSHApiException e) {
-                retry--;
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e1) {
-                    log.error(e1.getMessage(), e1);
-                }
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to list directory " + directoryPath + " on remote host "
-                            + serverInfo.getHost(), e);
-                }
-            }
-        }
-        return files;
-    }
-
-    @Deprecated
-    public synchronized void getJobStatuses(String userName, Map<String,JobStatus> jobIDs)throws SSHApiException {
-        int retry = 3;
-        RawCommandInfo rawCommandInfo = jobManagerConfiguration.getUserBasedMonitorCommand(userName);
-        StandardOutReader stdOutReader = new StandardOutReader();
-        while (retry > 0){
-            try {
-                log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-                CommandExecutor.executeCommand(rawCommandInfo, this.getSession(), stdOutReader);
-                retry=0;
-            } catch (SSHApiException e) {
-                retry--;
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException e1) {
-                    log.error(e1.getMessage(), e1);
-                }
-                reconnect(serverInfo, authenticationInfo);
-                if (retry == 0) {
-                    throw new SSHApiException("Failed to get job statuses from the remote host", e);
-                }
-            }
-        }
-        String result = getOutputifAvailable(stdOutReader, "Error getting job information from the resource !", jobManagerConfiguration.getBaseMonitorCommand());
-        jobManagerConfiguration.getParser().parseJobStatuses(userName, jobIDs, result);
-    }
-
-    public ServerInfo getServerInfo() {
-        return serverInfo;
-    }
-
-    public AuthenticationInfo getAuthenticationInfo() {
-        return authenticationInfo;
-    }
-
-    /**
-     * This is guaranteed to return a valid session.
-     *
-     * @return the current SSH session
-     */
-    public Session getSession() {
-        return this.session;
-    }
-
-    /**
-     * Reads the standard output of the executed command and returns it, after checking
-     * standard error for signs that the command failed.
-     *
-     * @param jobIDReaderCommandOutput reader holding the command's standard output and standard error
-     * @param errorMsg error message to use if the command failed
-     * @param command the command that was executed (used to detect errors in standard error)
-     * @return the standard output of the command
-     * @throws SSHApiException if the command output indicates an error
-     */
-    private String getOutputifAvailable(StandardOutReader jobIDReaderCommandOutput, String errorMsg, String command) throws SSHApiException {
-        String stdOutputString = jobIDReaderCommandOutput.getStdOutputString();
-        String stdErrorString = jobIDReaderCommandOutput.getStdErrorString();
-        log.info("StandardOutput Returned:" + stdOutputString);
-        log.info("StandardError  Returned:" +stdErrorString);
-        String[] list = command.split(File.separator);
-        command = list[list.length - 1];
-        // We are checking for stderr containing the command issued. Thus ignores the verbose logs in stderr.
-        if (stdErrorString != null && stdErrorString.contains(command.trim()) && !stdErrorString.contains("Warning")) {
-            log.error("Standard Error output : " + stdErrorString);
-            throw new SSHApiException(errorMsg + "\n\r StandardOutput: "+ stdOutputString + "\n\r StandardError: "+ stdErrorString);
-        }else if(stdOutputString != null && stdOutputString.contains("error")){
-            throw new SSHApiException(errorMsg + "\n\r StandardOutput: "+ stdOutputString + "\n\r StandardError: "+ stdErrorString);
-        }
-        return stdOutputString;
-    }
-
-    public void disconnect() throws SSHApiException {
-    	if(getSession().isConnected()){
-    		getSession().disconnect();
-    	}
-    }
-    /**
-	 * Create the JSch implementation to use for this connection.
-	 *
-	 * @param authenticationInfo
-	 *            authentication details; a GSI-aware JSch is created for GSI
-	 *            authentication, otherwise the default JSch implementation is used.
-	 * @return the new default JSch implementation.
-	 * @throws JSchException
-	 *             known host keys cannot be loaded.
-	 */
-	protected JSch createJSch(AuthenticationInfo authenticationInfo) throws JSchException {
-//		final File fs = new File(System.getProperty("user.home"));
-		if (authenticationInfo instanceof GSIAuthenticationInfo) {
-			final JSch jsch = new ExtendedJSch();
-//			knownHosts(jsch, fs);
-			return jsch;
-		} else {
-			final JSch jsch = new JSch();
-//			knownHosts(jsch, fs);
-			return jsch;
-		}
-	}
-	/**
-	 * Create a new remote session for the requested address.
-	 *
-	 * @param user
-	 *            login to authenticate as.
-	 * @param host
-	 *            server name to connect to.
-	 * @param port
-	 *            port number of the SSH daemon (typically 22).
-	 * @return new session instance, but otherwise unconfigured.
-	 * @throws JSchException
-	 *             the session could not be created.
-	 */
-	private Session createSession(JSch jsch, String user, String host, int port) throws JSchException {
-		final Session session = jsch.getSession(user, host, port);
-		// We retry already in getSession() method. JSch must not retry
-		// on its own.
-		session.setConfig("MaxAuthTries", "1"); //$NON-NLS-1$ //$NON-NLS-2$
-		session.setTimeout(Integer.parseInt(configReader.getConfiguration(SSH_SESSION_TIMEOUT)));
-	    java.util.Properties config = this.configReader.getProperties();
-	    session.setConfig(config);
-	    
-    	return session;
-	}
-	private static void knownHosts(final JSch sch,final File home) throws JSchException {
-		if (home == null)
-			return;
-		final File known_hosts = new File(new File(home, ".ssh"), "known_hosts"); //$NON-NLS-1$ //$NON-NLS-2$
-		try {
-			final FileInputStream in = new FileInputStream(known_hosts);
-			try {
-				sch.setKnownHosts(in);
-			} finally {
-				in.close();
-			}
-		} catch (FileNotFoundException none) {
-			// Oh well. They don't have a known hosts in home.
-		} catch (IOException err) {
-			// Oh well. They don't have a known hosts in home.
-		}
-	}
-
-
-	/**
-	 * This contains all the PBS-specific job statuses.
-	 * C -  Job is completed after having run.
-	 * E -  Job is exiting after having run.
-	 * H -  Job is held.
-	 * Q -  Job is queued, eligible to run or routed.
-	 * R -  Job is running.
-	 * T -  Job is being moved to a new location.
-	 * W -  Job is waiting for its execution time
-	 * (-a option) to be reached.
-	 * S -  (Unicos only) job is suspended.
-	 * The remaining values cover SLURM, SGE and LSF job states.
-	 */
-	public enum HPCJobStatus {
-		C, E, H, Q, R, T, W, S,U,F,CA,CD,CF,CG,NF,PD,PR,TO,qw,t,r,h,Er,Eqw,PEND,RUN,PSUSP,USUSP,SSUSP,DONE,EXIT,UNKWN,ZOMBI;
-
-		public static HPCJobStatus fromString(String status){
-			if(status != null){
-				if("C".equals(status)){
-					return HPCJobStatus.C;
-				}else if("E".equals(status)){
-					return HPCJobStatus.E;
-				}else if("H".equals(status)){
-					return HPCJobStatus.H;
-				}else if("Q".equals(status)){
-					return HPCJobStatus.Q;
-				}else if("R".equals(status)){
-					return HPCJobStatus.R;
-				}else if("T".equals(status)){
-					return HPCJobStatus.T;
-				}else if("W".equals(status)){
-					return HPCJobStatus.W;
-				}else if("S".equals(status)){
-					return HPCJobStatus.S;
-				}else if("F".equals(status)){
-					return HPCJobStatus.F;
-				}else if("CA".equals(status)){
-					return HPCJobStatus.CA;
-				}else if("CF".equals(status)){
-					return HPCJobStatus.CF;
-				}else if("CD".equals(status)){
-					return HPCJobStatus.CD;
-				}else if("CG".equals(status)){
-					return HPCJobStatus.CG;
-				}else if("NF".equals(status)){
-					return HPCJobStatus.NF;
-				}else if("PD".equals(status)){
-					return HPCJobStatus.PD;
-				}else if("PR".equals(status)){
-					return HPCJobStatus.PR;
-				}else if("TO".equals(status)){
-					return HPCJobStatus.TO;
-				}else if("U".equals(status)){
-					return HPCJobStatus.U;
-				}else if("qw".equals(status)){
-					return HPCJobStatus.qw;
-				}else if("t".equals(status)){
-					return HPCJobStatus.t;
-				}else if("r".equals(status)){
-					return HPCJobStatus.r;
-				}else if("h".equals(status)){
-					return HPCJobStatus.h;
-				}else if("Er".equals(status)){
-					return HPCJobStatus.Er;
-				}else if("Eqw".equals(status)){
-					return HPCJobStatus.Eqw;
-				}else if("RUN".equals(status)){      // LSF starts here
-					return HPCJobStatus.RUN;
-				}else if("PEND".equals(status)){
-					return HPCJobStatus.PEND;
-				}else if("DONE".equals(status)){
-					return HPCJobStatus.DONE;
-				}else if("PSUSP".equals(status)){
-					return HPCJobStatus.PSUSP;
-				}else if("USUSP".equals(status)){
-					return HPCJobStatus.USUSP;
-				}else if("SSUSP".equals(status)){
-					return HPCJobStatus.SSUSP;
-				}else if("EXIT".equals(status)){
-					return HPCJobStatus.EXIT;
-				}else if("ZOMBI".equals(status)){
-					return HPCJobStatus.ZOMBI;
-				}
-			}
-			return HPCJobStatus.U;
-		}
-	}
-
-}
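
For reference, the error detection removed with getOutputifAvailable() above treats a command as failed when its standard error mentions the command itself (unless the line is only a warning), or when standard output contains the word "error". A minimal, self-contained sketch of that heuristic - the qsub output strings below are hypothetical, and only the decision rule mirrors the removed code - could look like this:

    public class StdErrHeuristicSketch {

        // Same rule as getOutputifAvailable(): stderr mentions the base command name
        // (ignoring warnings), or stdout contains the word "error".
        static boolean indicatesFailure(String command, String stdOut, String stdErr) {
            String baseCommand = command.substring(command.lastIndexOf('/') + 1).trim();
            boolean stdErrFailure = stdErr != null
                    && stdErr.contains(baseCommand)
                    && !stdErr.contains("Warning");
            boolean stdOutFailure = stdOut != null && stdOut.contains("error");
            return stdErrFailure || stdOutFailure;
        }

        public static void main(String[] args) {
            // Hypothetical outputs for a PBS submission via /opt/pbs/bin/qsub
            System.out.println(indicatesFailure("/opt/pbs/bin/qsub", "1234.pbsserver", ""));                   // false
            System.out.println(indicatesFailure("/opt/pbs/bin/qsub", "", "qsub: Unknown queue"));              // true
            System.out.println(indicatesFailure("/opt/pbs/bin/qsub", "1234.pbsserver", "Warning: stale NFS")); // false
        }
    }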

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/HPCRemoteCluster.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/HPCRemoteCluster.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/HPCRemoteCluster.java
deleted file mode 100644
index 55a0ab6..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/HPCRemoteCluster.java
+++ /dev/null
@@ -1,333 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.impl;
-
-import com.jcraft.jsch.ChannelExec;
-import com.jcraft.jsch.JSch;
-import com.jcraft.jsch.JSchException;
-import com.jcraft.jsch.Session;
-import com.jcraft.jsch.UserInfo;
-import org.apache.airavata.common.exception.AiravataException;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.authentication.SSHKeyAuthentication;
-import org.apache.airavata.gfac.core.cluster.CommandInfo;
-import org.apache.airavata.gfac.core.cluster.CommandOutput;
-import org.apache.airavata.gfac.core.cluster.OutputParser;
-import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.impl.SSHUtils;
-import org.apache.airavata.model.status.JobStatus;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-
-/**
- * One Remote cluster instance for each compute resource.
- */
-public class HPCRemoteCluster implements RemoteCluster{
-    private static final Logger log = LoggerFactory.getLogger(HPCRemoteCluster.class);
-	private final SSHKeyAuthentication authentication;
-	private final ServerInfo serverInfo;
-	private final JobManagerConfiguration jobManagerConfiguration;
-	private final JSch jSch;
-	private Session session;
-	private OutputParser outputParser;
-
-	public HPCRemoteCluster(ServerInfo serverInfo, JobManagerConfiguration jobManagerConfiguration, AuthenticationInfo
-			authenticationInfo, OutputParser outputParser) throws AiravataException {
-		try {
-			this.serverInfo = serverInfo;
-			this.jobManagerConfiguration = jobManagerConfiguration;
-			if (authenticationInfo instanceof SSHKeyAuthentication) {
-				authentication = (SSHKeyAuthentication) authenticationInfo;
-			} else {
-				throw new AiravataException("Only SSH key authentication is supported");
-			}
-			this.outputParser = outputParser;
-			jSch = new JSch();
-			jSch.addIdentity(authentication.getPrivateKeyFilePath(), authentication.getPublicKeyFilePath(), authentication
-					.getPassphrase().getBytes());
-			session = jSch.getSession(serverInfo.getUserName(), serverInfo.getHost(), serverInfo.getPort());
-			session.setUserInfo(new DefaultUserInfo(serverInfo.getUserName(), null, authentication.getPassphrase()));
-			session.connect(); // 0 connection timeout
-		} catch (JSchException e) {
-			throw new AiravataException("JSch initialization error ", e);
-		}
-	}
-
-	@Override
-	public String submitBatchJob(String jobScriptFilePath, String workingDirectory) throws SSHApiException {
-		scpTo(jobScriptFilePath, workingDirectory); // scp script file to working directory
-		RawCommandInfo submitCommand = jobManagerConfiguration.getSubmitCommand(workingDirectory, jobScriptFilePath);
-
-		StandardOutReader reader = new StandardOutReader();
-		executeCommand(submitCommand, reader);
-		throwExceptionOnError(reader, submitCommand);
-		return outputParser.parseJobSubmission(reader.getStdOutputString());
-	}
-
-	@Override
-	public void scpTo(String localFile, String remoteFile) throws SSHApiException {
-		int retry = 3;
-		while (retry > 0) {
-			try {
-				if (!session.isConnected()) {
-					session.connect();
-				}
-				log.info("Transferring localhost:" + localFile  + " to " + serverInfo.getHost() + ":" + remoteFile);
-				SSHUtils.scpTo(localFile, remoteFile, session);
-				retry = 0;
-			} catch (Exception e) {
-				retry--;
-				if (!session.isConnected()) {
-					try {
-						session.connect();
-					} catch (JSchException e1) {
-						throw new SSHApiException("JSch session connection failed", e1);
-					}
-				}
-				if (retry == 0) {
-					throw new SSHApiException("Failed to scp localhost:" + localFile + " to " + serverInfo.getHost() +
-							":" + remoteFile, e);
-				} else {
-					log.info("Retry transfer localhost:" + localFile + " to " + serverInfo.getHost() + ":" +
-							remoteFile);
-				}
-			}
-		}
-	}
-
-	@Override
-	public void scpFrom(String remoteFile, String localFile) throws SSHApiException {
-		int retry = 3;
-		while(retry>0) {
-			try {
-				if (!session.isConnected()) {
-					session.connect();
-				}
-				log.info("Transferring " + serverInfo.getHost() + ":" + remoteFile + " To localhost:" + localFile);
-				SSHUtils.scpFrom(remoteFile, localFile, session);
-				retry=0;
-			} catch (Exception e) {
-				retry--;
-				if (!session.isConnected()) {
-					try {
-						session.connect();
-					} catch (JSchException e1) {
-						throw new SSHApiException("JSch session connection failed", e1);
-					}
-				}
-				if (retry == 0) {
-					throw new SSHApiException("Failed to scp " + serverInfo.getHost() + ":" + remoteFile + " to " +
-							"localhost:" + localFile, e);
-				} else {
-					log.info("Retry transfer " + serverInfo.getHost() + ":" + remoteFile + "  to localhost:" + localFile);
-				}
-			}
-		}
-	}
-
-	@Override
-	public void scpThirdParty(String remoteFileSource, String remoteFileTarget) throws SSHApiException {
-		try {
-			if(!session.isConnected()){
-				session.connect();
-			}
-			log.info("Transferring from:" + remoteFileSource + " To: " + remoteFileTarget);
-			SSHUtils.scpThirdParty(remoteFileSource, remoteFileTarget, session);
-		} catch (IOException | JSchException e) {
-			throw new SSHApiException("Failed scp file:" + remoteFileSource + " to remote file "
-					+remoteFileTarget , e);
-		}
-	}
-
-	@Override
-	public void makeDirectory(String directoryPath) throws SSHApiException {
-		try {
-			if (!session.isConnected()) {
-				session.connect();
-			}
-			log.info("Creating directory: " + serverInfo.getHost() + ":" + directoryPath);
-			SSHUtils.makeDirectory(directoryPath, session);
-		} catch (JSchException | IOException e) {
-			throw new SSHApiException("Failed to create directory " + serverInfo.getHost() + ":" + directoryPath);
-		}
-	}
-
-	@Override
-	public boolean cancelJob(String jobId) throws SSHApiException {
-		RawCommandInfo cancelCommand = jobManagerConfiguration.getCancelCommand(jobId);
-		StandardOutReader reader = new StandardOutReader();
-		executeCommand(cancelCommand, reader);
-		throwExceptionOnError(reader, cancelCommand);
-		return true;
-	}
-
-	@Override
-	public JobStatus getJobStatus(String jobId) throws SSHApiException {
-		RawCommandInfo monitorCommand = jobManagerConfiguration.getMonitorCommand(jobId);
-		StandardOutReader reader = new StandardOutReader();
-		executeCommand(monitorCommand, reader);
-		throwExceptionOnError(reader, monitorCommand);
-		return outputParser.parseJobStatus(jobId, reader.getStdOutputString());
-	}
-
-	@Override
-	public String getJobIdByJobName(String jobName, String userName) throws SSHApiException {
-		RawCommandInfo jobIdMonitorCommand = jobManagerConfiguration.getJobIdMonitorCommand(jobName, userName);
-		StandardOutReader reader = new StandardOutReader();
-		executeCommand(jobIdMonitorCommand, reader);
-		throwExceptionOnError(reader, jobIdMonitorCommand);
-		return outputParser.parseJobId(jobName, reader.getStdOutputString());
-	}
-
-	@Override
-	public void getJobStatuses(String userName, Map<String, JobStatus> jobStatusMap) throws SSHApiException {
-		RawCommandInfo userBasedMonitorCommand = jobManagerConfiguration.getUserBasedMonitorCommand(userName);
-		StandardOutReader reader = new StandardOutReader();
-		executeCommand(userBasedMonitorCommand, reader);
-		throwExceptionOnError(reader, userBasedMonitorCommand);
-		outputParser.parseJobStatuses(userName, jobStatusMap, reader.getStdOutputString());
-	}
-
-	@Override
-	public List<String> listDirectory(String directoryPath) throws SSHApiException {
-		try {
-			if (!session.isConnected()) {
-				session.connect();
-			}
-			log.info("Listing directory: " + serverInfo.getHost() + ":" + directoryPath);
-			return SSHUtils.listDirectory(directoryPath, session);
-		} catch (JSchException | IOException e) {
-			throw new SSHApiException("Failed to list directory " + serverInfo.getHost() + ":" + directoryPath);
-		}
-	}
-
-	@Override
-	public Session getSession() throws SSHApiException {
-		return session;
-	}
-
-	@Override
-	public void disconnect() throws SSHApiException {
-		session.disconnect();
-	}
-
-	/**
-	 * Throws an SSHApiException if the standard error output indicates that the command failed.
-	 * @param reader - command output reader
-	 * @param submitCommand - command which was executed on the remote machine.
-	 * @throws SSHApiException if the command reported an error
-	 */
-	private void throwExceptionOnError(StandardOutReader reader, RawCommandInfo submitCommand) throws SSHApiException{
-		String stdErrorString = reader.getStdErrorString();
-		String command = submitCommand.getCommand().substring(submitCommand.getCommand().lastIndexOf(File.separator)
-				+ 1);
-		if (stdErrorString == null) {
-			// nothing to do
-		}else if ((stdErrorString.contains(command.trim()) && !stdErrorString.contains("Warning")) || stdErrorString
-				.contains("error")) {
-			log.error("Command {} , Standard Error output {}", command, stdErrorString);
-			throw new SSHApiException("Error running command " + command + "  on remote cluster. StandardError: " +
-					stdErrorString);
-		}
-	}
-
-	private void executeCommand(CommandInfo commandInfo, CommandOutput commandOutput) throws SSHApiException {
-		String command = commandInfo.getCommand();
-		ChannelExec channelExec = null;
-		try {
-			if (!session.isConnected()) {
-				session.connect();
-			}
-			channelExec = ((ChannelExec) session.openChannel("exec"));
-			channelExec.setCommand(command);
-		    channelExec.setInputStream(null);
-			channelExec.setErrStream(commandOutput.getStandardError());
-			log.info("Executing command {}", commandInfo.getCommand());
-			channelExec.connect();
-			commandOutput.onOutput(channelExec);
-		} catch (JSchException e) {
-			throw new SSHApiException("Unable to execute command - ", e);
-		}finally {
-			//Only disconnecting the channel, session can be reused
-			if (channelExec != null) {
-				channelExec.disconnect();
-			}
-		}
-	}
-
-	@Override
-	public ServerInfo getServerInfo() {
-		return this.serverInfo;
-	}
-
-	private class DefaultUserInfo implements UserInfo {
-
-		private String userName;
-		private String password;
-		private String passphrase;
-
-		public DefaultUserInfo(String userName, String password, String passphrase) {
-			this.userName = userName;
-			this.password = password;
-			this.passphrase = passphrase;
-		}
-
-		@Override
-		public String getPassphrase() {
-			return null;
-		}
-
-		@Override
-		public String getPassword() {
-			return null;
-		}
-
-		@Override
-		public boolean promptPassword(String s) {
-			return false;
-		}
-
-		@Override
-		public boolean promptPassphrase(String s) {
-			return false;
-		}
-
-		@Override
-		public boolean promptYesNo(String s) {
-			return false;
-		}
-
-		@Override
-		public void showMessage(String s) {
-
-		}
-	}
-}
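
HPCRemoteCluster above replaces the GSI path with plain JSch and key-based authentication. A minimal sketch of that session setup - host, user, key paths and passphrase are placeholders, and the UserInfo handling and host-key configuration here are simplifications rather than the class's exact behaviour - might look like this:

    import com.jcraft.jsch.JSch;
    import com.jcraft.jsch.JSchException;
    import com.jcraft.jsch.Session;

    public class KeyBasedSessionSketch {
        public static void main(String[] args) throws JSchException {
            JSch jsch = new JSch();
            // Register the key pair and passphrase, as the constructor does with
            // authentication.getPrivateKeyFilePath() / getPublicKeyFilePath() / getPassphrase().
            jsch.addIdentity("/home/user/.ssh/id_rsa", "/home/user/.ssh/id_rsa.pub",
                    "passphrase".getBytes());

            Session session = jsch.getSession("username", "login.example.org", 22);
            // The original supplies a UserInfo implementation instead; disabling strict
            // host-key checking just keeps this sketch self-contained.
            session.setConfig("StrictHostKeyChecking", "no");
            session.connect(); // connects without an explicit timeout, as in the constructor above

            session.disconnect();
        }
    }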

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/SSHUserInfo.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/SSHUserInfo.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/SSHUserInfo.java
deleted file mode 100644
index ee630cf..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/SSHUserInfo.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.impl;
-
-import com.jcraft.jsch.UserInfo;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 9/20/13
- * Time: 2:31 PM
- */
-
-public class SSHUserInfo implements UserInfo {
-
-    private String password;
-
-    public SSHUserInfo(String pwd) {
-        this.password = pwd;
-    }
-
-    public String getPassphrase() {
-        return this.password;
-    }
-
-    public String getPassword() {
-        return this.password;
-    }
-
-    public boolean promptPassword(String message) {
-        return false;  //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    public boolean promptPassphrase(String message) {
-        return false;  //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    public boolean promptYesNo(String message) {
-        return false;  //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    public void showMessage(String message) {
-        //To change body of implemented methods use File | Settings | File Templates.
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/StandardOutReader.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/StandardOutReader.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/StandardOutReader.java
deleted file mode 100644
index 6a2d11a..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/StandardOutReader.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.impl;
-
-import com.jcraft.jsch.Channel;
-
-import org.apache.airavata.gfac.core.cluster.CommandOutput;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-public class StandardOutReader implements CommandOutput {
-
-    private static final Logger logger = LoggerFactory.getLogger(StandardOutReader.class);
-    String stdOutputString = null;
-    ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
-    public void onOutput(Channel channel) {
-        try {
-            StringBuffer pbsOutput = new StringBuffer("");
-            InputStream inputStream =  channel.getInputStream();
-            byte[] tmp = new byte[1024];
-            do {
-                while (inputStream.available() > 0) {
-                    int i = inputStream.read(tmp, 0, 1024);
-                    if (i < 0) break;
-                    pbsOutput.append(new String(tmp, 0, i));
-                }
-            } while (!channel.isClosed()) ;
-            String output = pbsOutput.toString();
-            this.setStdOutputString(output);
-        } catch (IOException e) {
-            logger.error(e.getMessage(), e);
-        }
-
-    }
-
-
-    public void exitCode(int code) {
-        System.out.println("Program exit code - " + code);
-    }
-
-    public String getStdOutputString() {
-        return stdOutputString;
-    }
-
-    public void setStdOutputString(String stdOutputString) {
-        this.stdOutputString = stdOutputString;
-    }
-
-    public String getStdErrorString() {
-        return errorStream.toString();
-    }
-
-    public OutputStream getStandardError() {
-        return errorStream;
-    }
-}
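
StandardOutReader is driven by an exec channel: the standard-error stream is attached up front and onOutput() drains standard output until the channel closes, mirroring executeCommand() in HPCRemoteCluster above. A rough sketch of that wiring, assuming an already-connected Session, a class in the same package, and a placeholder command:

    import com.jcraft.jsch.ChannelExec;
    import com.jcraft.jsch.JSchException;
    import com.jcraft.jsch.Session;

    public class CommandOutputSketch {

        // Runs a single command and returns its standard output via a StandardOutReader.
        static String runCommand(Session session, String command) throws JSchException {
            StandardOutReader reader = new StandardOutReader();
            ChannelExec channel = (ChannelExec) session.openChannel("exec");
            try {
                channel.setCommand(command);                     // e.g. a placeholder such as "uname -a"
                channel.setInputStream(null);
                channel.setErrStream(reader.getStandardError()); // stderr collected by the reader
                channel.connect();
                reader.onOutput(channel);                        // drains stdout until the channel closes
                return reader.getStdOutputString();
            } finally {
                channel.disconnect();                            // only the channel is closed; the session can be reused
            }
        }
    }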

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/SystemCommandOutput.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/SystemCommandOutput.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/SystemCommandOutput.java
deleted file mode 100644
index e2bfd84..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/SystemCommandOutput.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.impl;
-
-import com.jcraft.jsch.Channel;
-import org.apache.airavata.gfac.core.cluster.CommandOutput;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 8/15/13
- * Time: 10:44 AM
- */
-
-public class SystemCommandOutput implements CommandOutput {
-
-    private static final Logger logger = LoggerFactory.getLogger(SystemCommandOutput.class);
-    public void onOutput(Channel channel) {
-        try {
-            InputStream inputStream = channel.getInputStream();
-
-            byte[] tmp = new byte[1024];
-            while (true) {
-                while (inputStream.available() > 0) {
-                    int i = inputStream.read(tmp, 0, 1024);
-                    if (i < 0) break;
-                    System.out.print(new String(tmp, 0, i));
-                }
-                if (channel.isClosed()) {
-                    System.out.println("exit-status: " + channel.getExitStatus());
-                    break;
-                }
-                try {
-                    Thread.sleep(1000);
-                } catch (Exception ignored) {
-                }
-            }
-
-        } catch (IOException e) {
-            logger.error(e.getMessage(), e);
-        }
-
-    }
-
-    public OutputStream getStandardError() {
-        return System.err;
-    }
-
-    public void exitCode(int code) {
-        System.out.println("Program exit code - " + code);
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPasswordAuthenticationInfo.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPasswordAuthenticationInfo.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPasswordAuthenticationInfo.java
deleted file mode 100644
index e56cae3..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPasswordAuthenticationInfo.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.impl.authentication;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 9/20/13
- * Time: 12:15 PM
- */
-
-import org.apache.airavata.gfac.core.authentication.SSHPasswordAuthentication;
-
-/**
- * An authenticator used for raw SSH sessions. Gives SSH user name, password
- * directly.
- * This is only an example implementation.
- */
-public class DefaultPasswordAuthenticationInfo implements SSHPasswordAuthentication {
-
-    private String password;
-
-    public DefaultPasswordAuthenticationInfo(String pwd) {
-        this.password = pwd;
-    }
-
-    public String getPassword(String userName, String hostName) {
-        return password;
-    }
-}
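
As the class comment says, this was only an example implementation; a trivial usage sketch, with placeholder credentials and host, would be:

    import org.apache.airavata.gfac.core.authentication.SSHPasswordAuthentication;
    import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;

    public class PasswordAuthSketch {
        public static void main(String[] args) {
            SSHPasswordAuthentication auth = new DefaultPasswordAuthenticationInfo("secret-password");
            // The cluster code calls back with user name and host; this sample implementation
            // returns the same password regardless of either argument.
            System.out.println(auth.getPassword("username", "login.example.org"));
        }
    }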

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPublicKeyAuthentication.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPublicKeyAuthentication.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPublicKeyAuthentication.java
deleted file mode 100644
index 529cccc..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPublicKeyAuthentication.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.impl.authentication;
-
-import org.apache.airavata.gfac.core.authentication.SSHPublicKeyAuthentication;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 10/4/13
- * Time: 11:44 AM
- */
-
-/**
- * Default public key authentication.
- * Note : This is only a sample implementation.
- */
-public class DefaultPublicKeyAuthentication implements SSHPublicKeyAuthentication {
-
-    private byte[] privateKey;
-    private byte[] publicKey;
-    private String passPhrase = null;
-
-    public DefaultPublicKeyAuthentication(byte[] priv, byte[] pub) {
-        this.privateKey = priv;
-        this.publicKey = pub;
-    }
-
-    public DefaultPublicKeyAuthentication(byte[] priv, byte[] pub, String pass) {
-        this.privateKey = priv;
-        this.publicKey = pub;
-        this.passPhrase = pass;
-    }
-
-    public String getPassPhrase() {
-        return passPhrase;
-    }
-
-    public void bannerMessage(String message) {
-        System.out.println(message);
-    }
-
-    public byte[] getPrivateKey(String userName, String hostName) {
-        return privateKey;
-    }
-
-    public byte[] getPublicKey(String userName, String hostName) {
-        return publicKey;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPublicKeyFileAuthentication.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPublicKeyFileAuthentication.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPublicKeyFileAuthentication.java
deleted file mode 100644
index 67c56cd..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/DefaultPublicKeyFileAuthentication.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.impl.authentication;
-
-import org.apache.airavata.gfac.core.authentication.SSHPublicKeyFileAuthentication;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 10/4/13
- * Time: 11:40 AM
- */
-
-/**
- * Default public key authentication using files.
- * Note : This is only a sample implementation.
- */
-public class DefaultPublicKeyFileAuthentication implements SSHPublicKeyFileAuthentication {
-
-    private String publicKeyFile;
-    private String privateKeyFile;
-    private String passPhrase = null;
-
-    public DefaultPublicKeyFileAuthentication(String pubFile, String privFile) {
-        this.publicKeyFile = pubFile;
-        this.privateKeyFile = privFile;
-
-    }
-
-    public DefaultPublicKeyFileAuthentication(String pubFile, String privFile, String pass) {
-        this.publicKeyFile = pubFile;
-        this.privateKeyFile = privFile;
-        this.passPhrase = pass;
-
-    }
-
-    public String getPassPhrase() {
-        return passPhrase;
-    }
-
-    public void bannerMessage(String message) {
-        System.out.println(message);
-    }
-
-    public String getPublicKeyFile(String userName, String hostName) {
-        return publicKeyFile;
-    }
-
-    public String getPrivateKeyFile(String userName, String hostName) {
-        return privateKeyFile;
-    }
-}
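
For reference, a minimal sketch of how this (now removed) sample implementation was used: construct it with the
public/private key file paths and an optional passphrase, then hand it to the SSH layer. The key paths, passphrase,
user name and host name below are hypothetical placeholders, not values taken from the Airavata codebase.

import org.apache.airavata.gfac.core.authentication.SSHPublicKeyFileAuthentication;
import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;

public class PublicKeyFileAuthExample {
    public static void main(String[] args) {
        // Hypothetical key locations; substitute paths that exist on your gateway host.
        SSHPublicKeyFileAuthentication auth = new DefaultPublicKeyFileAuthentication(
                "/home/airavata/.ssh/id_rsa.pub",
                "/home/airavata/.ssh/id_rsa",
                "key-passphrase");

        // The user name and host name arguments are ignored by this sample implementation.
        System.out.println("Public key file : " + auth.getPublicKeyFile("airavata", "hpc.example.org"));
        System.out.println("Private key file: " + auth.getPrivateKeyFile("airavata", "hpc.example.org"));
    }
}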

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/MyProxyAuthenticationInfo.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/MyProxyAuthenticationInfo.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/MyProxyAuthenticationInfo.java
deleted file mode 100644
index a2e2f42..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/authentication/MyProxyAuthenticationInfo.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.impl.authentication;
-
-import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
-import org.globus.myproxy.MyProxy;
-import org.globus.myproxy.MyProxyException;
-import org.ietf.jgss.GSSCredential;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 8/14/13
- * Time: 5:22 PM
- */
-
-public class MyProxyAuthenticationInfo extends GSIAuthenticationInfo {
-
-    public static final String X509_CERT_DIR = "X509_CERT_DIR";
-    private String userName;
-    private String password;
-    private String myProxyUrl;
-    private int myProxyPort;
-    private int lifeTime;
-
-    public MyProxyAuthenticationInfo(String userName, String password, String myProxyUrl, int myProxyPort,
-                                     int life, String certificatePath) {
-        this.userName = userName;
-        this.password = password;
-        this.myProxyUrl = myProxyUrl;
-        this.myProxyPort = myProxyPort;
-        this.lifeTime = life;
-        properties.setProperty(X509_CERT_DIR, certificatePath);
-    }
-
-    public String getUserName() {
-        return userName;
-    }
-
-    public void setUserName(String userName) {
-        this.userName = userName;
-    }
-
-    public String getPassword() {
-        return password;
-    }
-
-    public void setPassword(String password) {
-        this.password = password;
-    }
-
-    public String getMyProxyUrl() {
-        return myProxyUrl;
-    }
-
-    public void setMyProxyUrl(String myProxyUrl) {
-        this.myProxyUrl = myProxyUrl;
-    }
-
-    public int getMyProxyPort() {
-        return myProxyPort;
-    }
-
-    public void setMyProxyPort(int myProxyPort) {
-        this.myProxyPort = myProxyPort;
-    }
-
-    public int getLifeTime() {
-        return lifeTime;
-    }
-
-    public void setLifeTime(int lifeTime) {
-        this.lifeTime = lifeTime;
-    }
-
-    public GSSCredential getCredentials() throws SecurityException {
-        return getMyProxyCredentials();
-    }
-
-    private GSSCredential getMyProxyCredentials() throws SecurityException {
-        MyProxy myproxy = new MyProxy(this.myProxyUrl, this.myProxyPort);
-        try {
-            return myproxy.get(this.getUserName(), this.password, this.lifeTime);
-        } catch (MyProxyException e) {
-            throw new SecurityException("Error getting proxy credentials", e);
-        }
-    }
-
-
-}
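
A hedged sketch of how the removed MyProxyAuthenticationInfo was used to obtain a GSI proxy credential. The MyProxy
host, user name, password and certificate directory are placeholders; the port 7512 and the lifetime in seconds mirror
the values that GridPushMonitorHandler (removed later in this commit) passes to the same constructor.

import org.apache.airavata.gfac.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
import org.ietf.jgss.GSSCredential;

public class MyProxyCredentialExample {
    public static void main(String[] args) throws Exception {
        // Placeholder MyProxy settings; adjust to your MyProxy server and trust root location.
        MyProxyAuthenticationInfo authInfo = new MyProxyAuthenticationInfo(
                "myproxy-user", "myproxy-pass", "myproxy.example.org",
                7512, 17280000, "/etc/grid-security/certificates");

        // Internally delegates to MyProxy.get(username, password, lifetime).
        GSSCredential credential = authInfo.getCredentials();
        System.out.println("Remaining credential lifetime (s): " + credential.getRemainingLifetime());
    }
}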

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/jsch/ExtendedJSch.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/jsch/ExtendedJSch.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/jsch/ExtendedJSch.java
deleted file mode 100644
index 617d7cd..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/jsch/ExtendedJSch.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.jsch;
-
-import com.jcraft.jsch.ExtendedSession;
-import com.jcraft.jsch.JSch;
-import com.jcraft.jsch.JSchException;
-import com.jcraft.jsch.Session;
-import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 8/15/13
- * Time: 10:03 AM
- */
-
-/**
- * Extended JSch to incorporate authentication info.
- */
-public class ExtendedJSch extends JSch {
-
-    private GSIAuthenticationInfo authenticationInfo;
-
-    public ExtendedJSch() {
-        super();
-    }
-
-    public GSIAuthenticationInfo getAuthenticationInfo() {
-        return authenticationInfo;
-    }
-
-    public void setAuthenticationInfo(GSIAuthenticationInfo authenticationInfo) {
-        this.authenticationInfo = authenticationInfo;
-    }
-
-    public Session getSession(String username, String host, int port) throws JSchException {
-
-        if(host==null){
-            throw new JSchException("host must not be null.");
-        }
-        Session s = new ExtendedSession(this, username, host, port);
-        return s;
-
-    }
-}
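
A small illustrative sketch of the removed ExtendedJSch: the GSI authentication info is attached to the JSch instance
so the ExtendedSession it returns can pick it up. The MyProxy parameters, user name and host below are placeholders.

import com.jcraft.jsch.Session;
import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
import org.apache.airavata.gfac.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
import org.apache.airavata.gfac.gsi.ssh.jsch.ExtendedJSch;

public class ExtendedJSchExample {
    public static void main(String[] args) throws Exception {
        GSIAuthenticationInfo authInfo = new MyProxyAuthenticationInfo(
                "myproxy-user", "myproxy-pass", "myproxy.example.org",
                7512, 17280000, "/etc/grid-security/certificates");

        ExtendedJSch jsch = new ExtendedJSch();
        jsch.setAuthenticationInfo(authInfo);

        // getSession() hands back an ExtendedSession bound to this JSch instance.
        Session session = jsch.getSession("airavata", "hpc.example.org", 22);
        System.out.println("Created session of type " + session.getClass().getName());
    }
}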

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/listener/JobSubmissionListener.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/listener/JobSubmissionListener.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/listener/JobSubmissionListener.java
deleted file mode 100644
index dd492ad..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/listener/JobSubmissionListener.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.listener;
-
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-
-/**
- * This abstract class can be extended by the end user of the API
- * to perform desired operations when the job status changes. The API has a
- * default job listener which can be used by end users, but it is
- * configurable and can be passed to job submission methods.
- */
-public abstract class JobSubmissionListener {
-
-    private JobStatus jobStatus = JobStatus.U;
-
-    /**
-     * This can be used to perform some operation when the status changes.
-     *
-     * @param jobDescriptor
-     * @throws SSHApiException
-     */
-    public abstract void statusChanged(JobDescriptor jobDescriptor) throws SSHApiException;
-
-    /**
-     * This can be used to perform some operation when the status changes.
-     * @param jobStatus
-     * @throws SSHApiException
-     */
-    public abstract void statusChanged(JobStatus jobStatus) throws SSHApiException;
-
-
-    public JobStatus getJobStatus() {
-        return jobStatus;
-    }
-
-    public void setJobStatus(JobStatus jobStatus) {
-        this.jobStatus = jobStatus;
-    }
-
-    /**
-     * This method is used to block the calling thread until the current status of the job is DONE or FAILED
-     */
-    public void waitFor()  throws SSHApiException{
-        while (!isJobDone()) {
-            synchronized (this) {
-                try {
-                    wait();
-                } catch (InterruptedException e) {}
-            }
-        }
-    }
-
-    /**
-     * Based on the implementation, the user can define how to decide that the job
-     * is done
-     * @return
-     * @throws SSHApiException
-     */
-    public abstract boolean isJobDone() throws SSHApiException;
-}
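
Since the abstract class above is meant to be extended by API users, here is a hedged sketch of a concrete listener
that releases waitFor() once the job completes. It assumes the status string carried by the JobDescriptor matches a
JobStatus constant name, and it treats JobStatus.C as the terminal state, mirroring CommonUtils.isJobFinished (also
removed in this commit).

import org.apache.airavata.gfac.core.JobDescriptor;
import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.cluster.JobStatus;
import org.apache.airavata.gfac.gsi.ssh.listener.JobSubmissionListener;

public class LoggingJobSubmissionListener extends JobSubmissionListener {

    @Override
    public void statusChanged(JobDescriptor jobDescriptor) throws SSHApiException {
        // Assumes the descriptor's status string matches a JobStatus constant name.
        statusChanged(JobStatus.valueOf(jobDescriptor.getStatus()));
    }

    @Override
    public void statusChanged(JobStatus jobStatus) throws SSHApiException {
        setJobStatus(jobStatus);
        System.out.println("Job moved to state: " + jobStatus);
        if (isJobDone()) {
            synchronized (this) {
                notifyAll();   // releases threads blocked in waitFor()
            }
        }
    }

    @Override
    public boolean isJobDone() throws SSHApiException {
        // C (completed) is the terminal state checked by CommonUtils.isJobFinished as well.
        return getJobStatus() == JobStatus.C;
    }
}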

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/CommonUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/CommonUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/CommonUtils.java
deleted file mode 100644
index 216459e..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/CommonUtils.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.util;
-
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.gsi.ssh.api.job.*;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-
-public class CommonUtils {
-    /**
-     * This returns true if the given job is finished,
-     * otherwise false
-     *
-     * @param job
-     * @return
-     */
-    public static boolean isJobFinished(JobDescriptor job) {
-        if (JobStatus.C.toString().equals(job.getStatus())) {
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    /**
-     * Converts a maximum wall time given in minutes into an hours:minutes:seconds string.
-     *
-     * @param maxWalltime the maximum wall time in minutes
-     * @return
-     */
-    public static String maxWallTimeCalculator(int maxWalltime) {
-        if (maxWalltime < 60) {
-            return "00:" + maxWalltime + ":00";
-        } else {
-            int minutes = maxWalltime % 60;
-            int hours = maxWalltime / 60;
-            return hours + ":" + minutes + ":00";
-        }
-    }
-    public static String maxWallTimeCalculatorForLSF(int maxWalltime) {
-        if (maxWalltime < 60) {
-            return "00:" + maxWalltime;
-        } else {
-            int minutes = maxWalltime % 60;
-            int hours = maxWalltime / 60;
-            return hours + ":" + minutes;
-        }
-    }
-    public static JobManagerConfiguration getPBSJobManager(String installedPath) {
-        return new PBSJobConfiguration("PBSTemplate.xslt",".pbs", installedPath, new PBSOutputParser());
-    }
-
-    public static JobManagerConfiguration getSLURMJobManager(String installedPath) {
-        return new SlurmJobConfiguration("SLURMTemplate.xslt", ".slurm", installedPath, new SlurmOutputParser());
-    }
-
-     public static JobManagerConfiguration getUGEJobManager(String installedPath) {
-        return new UGEJobConfiguration("UGETemplate.xslt", ".pbs", installedPath, new UGEOutputParser());
-    }
-
-    public static JobManagerConfiguration getLSFJobManager(String installedPath) {
-        return new LSFJobConfiguration("LSFTemplate.xslt", ".lsf", installedPath, new LSFOutputParser());
-    }
-}
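
A quick worked example of the wall-time helpers above, since the truncated Javadoc left their intent unclear: they
convert a limit given in minutes into the hours:minutes:seconds (or hours:minutes for LSF) strings that are written
into the job templates. The expected outputs in the comments follow directly from the code above, including the
missing zero-padding for single-digit minutes.

import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;

public class WallTimeExample {
    public static void main(String[] args) {
        System.out.println(CommonUtils.maxWallTimeCalculator(30));       // 00:30:00
        System.out.println(CommonUtils.maxWallTimeCalculator(90));       // 1:30:00
        System.out.println(CommonUtils.maxWallTimeCalculator(125));      // 2:5:00  (minutes are not zero-padded)
        System.out.println(CommonUtils.maxWallTimeCalculatorForLSF(90)); // 1:30
    }
}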

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHAPIUIKeyboardInteractive.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHAPIUIKeyboardInteractive.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHAPIUIKeyboardInteractive.java
deleted file mode 100644
index 3f60a59..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHAPIUIKeyboardInteractive.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.util;
-
-import com.jcraft.jsch.UIKeyboardInteractive;
-import com.jcraft.jsch.UserInfo;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 10/4/13
- * Time: 8:34 AM
- */
-
-/**
- * This is a dummy class; keyboard interactivity is not really used when acting as an API,
- * but it is needed to satisfy the JSch UIKeyboardInteractive and UserInfo interfaces.
- */
-public class SSHAPIUIKeyboardInteractive implements UIKeyboardInteractive, UserInfo {
-
-    private String password;
-
-    public SSHAPIUIKeyboardInteractive(String pwd) {
-        this.password = pwd;
-    }
-
-    public String[] promptKeyboardInteractive(String destination, String name,
-                                              String instruction, String[] prompt, boolean[] echo) {
-        return null;
-    }
-
-    public String getPassphrase() {
-        return password;
-    }
-
-    public String getPassword() {
-        return password;
-    }
-
-    public boolean promptPassword(String message) {
-        return false;  //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    public boolean promptPassphrase(String message) {
-        return false;  //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    public boolean promptYesNo(String message) {
-        return false;  //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    public void showMessage(String message) {
-        //To change body of implemented methods use File | Settings | File Templates.
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHKeyPasswordHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHKeyPasswordHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHKeyPasswordHandler.java
deleted file mode 100644
index ad9a5d5..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHKeyPasswordHandler.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gsi.ssh.util;
-
-import com.jcraft.jsch.UserInfo;
-import org.apache.airavata.gfac.core.authentication.SSHKeyAuthentication;
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 10/4/13
- * Time: 2:22 PM
- */
-
-/**
- * This class is used to get the pass phrase to decrypt public/private keys.
- */
-public class SSHKeyPasswordHandler implements UserInfo {
-
-    private SSHKeyAuthentication keyAuthenticationHandler;
-
-    public SSHKeyPasswordHandler(SSHKeyAuthentication handler) {
-        this.keyAuthenticationHandler = handler;
-    }
-
-    public String getPassphrase() {
-        return keyAuthenticationHandler.getPassPhrase();
-    }
-
-    public String getPassword() {
-        throw new NotImplementedException();
-    }
-
-    public boolean promptPassword(String message) {
-        return false;
-    }
-
-    public boolean promptPassphrase(String message) {
-        return true;
-    }
-
-    public boolean promptYesNo(String message) {
-        return false;
-    }
-
-    public void showMessage(String message) {
-        keyAuthenticationHandler.bannerMessage(message);
-    }
-}
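
To show where the removed SSHKeyPasswordHandler plugged in: it is a JSch UserInfo, so it was registered on a JSch
session so that JSch could ask it for the key passphrase while decrypting the private key. The sketch assumes that
SSHPublicKeyFileAuthentication extends SSHKeyAuthentication (as its use with this handler suggests); the user name,
host and key paths are placeholders.

import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
import org.apache.airavata.gfac.gsi.ssh.util.SSHKeyPasswordHandler;

public class KeyPassphraseWiringExample {
    public static void main(String[] args) throws Exception {
        DefaultPublicKeyFileAuthentication auth = new DefaultPublicKeyFileAuthentication(
                "/home/airavata/.ssh/id_rsa.pub", "/home/airavata/.ssh/id_rsa", "key-passphrase");

        JSch jsch = new JSch();
        // Register the encrypted private key; JSch asks the UserInfo for the passphrase to decrypt it.
        jsch.addIdentity(auth.getPrivateKeyFile("airavata", "hpc.example.org"));

        Session session = jsch.getSession("airavata", "hpc.example.org", 22);
        session.setUserInfo(new SSHKeyPasswordHandler(auth));
        session.connect();      // succeeds only if the host actually accepts this key
        session.disconnect();
    }
}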


[2/7] airavata git commit: Removed gsi related code

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPushMonitorHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPushMonitorHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPushMonitorHandler.java
deleted file mode 100644
index d6da22a..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPushMonitorHandler.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.handlers;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Properties;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.handler.ThreadedHandler;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
-import org.apache.airavata.gfac.monitor.HPCMonitorID;
-import org.apache.airavata.gfac.monitor.impl.push.amqp.AMQPMonitor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- *   This handler is responsible for monitoring jobs in push mode
- *   and currently supports push monitoring across multiple grid resources.
- */
-public class GridPushMonitorHandler extends ThreadedHandler {
-    private final static Logger logger= LoggerFactory.getLogger(GridPushMonitorHandler.class);
-
-    private AMQPMonitor amqpMonitor;
-
-    private AuthenticationInfo authenticationInfo;
-
-    @Override
-    public void initProperties(Properties properties) throws GFacHandlerException {
-        String myProxyUser=null;
-        try{
-            myProxyUser = ServerSettings.getSetting("myproxy.username");
-            String myProxyPass = ServerSettings.getSetting("myproxy.password");
-            String certPath = ServerSettings.getSetting("trusted.cert.location");
-            String myProxyServer = ServerSettings.getSetting("myproxy.server");
-            setAuthenticationInfo(new MyProxyAuthenticationInfo(myProxyUser, myProxyPass, myProxyServer,
-                    7512, 17280000, certPath));
-
-            String hostList=(String)properties.get("hosts");
-            String proxyFilePath = ServerSettings.getSetting("proxy.file.path");
-            String connectionName=ServerSettings.getSetting("connection.name");
-            LinkedBlockingQueue<MonitorID> pushQueue = new LinkedBlockingQueue<MonitorID>();
-            LinkedBlockingQueue<MonitorID> finishQueue = new LinkedBlockingQueue<MonitorID>();
-            List<String> hosts= Arrays.asList(hostList.split(","));
-            amqpMonitor=new AMQPMonitor(null,pushQueue,finishQueue,proxyFilePath,connectionName,hosts);
-        }catch (ApplicationSettingsException e){
-            logger.error(e.getMessage(), e);
-            throw new GFacHandlerException(e.getMessage(), e);
-        }
-    }
-
-    @Override
-    public void run() {
-        amqpMonitor.run();
-    }
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException{
-        super.invoke(jobExecutionContext);
-        MonitorID monitorID=new HPCMonitorID(getAuthenticationInfo(),jobExecutionContext);
-        amqpMonitor.getRunningQueue().add(monitorID);
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    public AMQPMonitor getAmqpMonitor() {
-        return amqpMonitor;
-    }
-
-    public void setAmqpMonitor(AMQPMonitor amqpMonitor) {
-        this.amqpMonitor = amqpMonitor;
-    }
-
-    public AuthenticationInfo getAuthenticationInfo() {
-        return authenticationInfo;
-    }
-
-    public void setAuthenticationInfo(AuthenticationInfo authenticationInfo) {
-        this.authenticationInfo = authenticationInfo;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
deleted file mode 100644
index 79a4a8e..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
+++ /dev/null
@@ -1,471 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.impl.pull.qstat;
-
-import com.google.common.eventbus.EventBus;
-import org.apache.airavata.common.logger.AiravataLogger;
-import org.apache.airavata.common.logger.AiravataLoggerFactory;
-import org.apache.airavata.common.utils.LocalEventPublisher;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.impl.OutHandlerWorker;
-import org.apache.airavata.gfac.monitor.util.CommonUtils;
-import org.apache.airavata.gfac.core.GFac;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.core.GFacThreadPoolExecutor;
-import org.apache.airavata.gfac.monitor.HostMonitorData;
-import org.apache.airavata.gfac.monitor.UserMonitorData;
-import org.apache.airavata.gfac.monitor.core.PullMonitor;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.apache.airavata.gfac.monitor.impl.push.amqp.SimpleJobFinishConsumer;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.messaging.event.JobIdentifier;
-import org.apache.airavata.model.messaging.event.JobStatusChangeRequestEvent;
-import org.apache.airavata.model.experiment.JobState;
-
-import java.sql.Timestamp;
-import java.util.*;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingDeque;
-import java.util.concurrent.LinkedBlockingQueue;
-
-/**
- * This monitor is based on qstat command which can be run
- * in grid resources and retrieve the job status.
- */
-public class HPCPullMonitor extends PullMonitor {
-
-    private final static AiravataLogger logger = AiravataLoggerFactory.getLogger(HPCPullMonitor.class);
-    public static final int FAILED_COUNT = 5;
-
-    // This should probably use a delayed blocking queue to do the monitoring
-    private BlockingQueue<UserMonitorData> queue;
-
-    private boolean startPulling = false;
-
-    private Map<String, ResourceConnection> connections;
-
-    private LocalEventPublisher publisher;
-
-    private LinkedBlockingQueue<String> cancelJobList;
-
-    private List<String> completedJobsFromPush;
-
-    private GFac gfac;
-
-    private AuthenticationInfo authenticationInfo;
-
-    private ArrayList<MonitorID> removeList;
-
-    public HPCPullMonitor() {
-        connections = new HashMap<String, ResourceConnection>();
-        queue = new LinkedBlockingDeque<UserMonitorData>();
-        publisher = new LocalEventPublisher(new EventBus());
-        cancelJobList = new LinkedBlockingQueue<String>();
-        completedJobsFromPush = new ArrayList<String>();
-        (new SimpleJobFinishConsumer(this.completedJobsFromPush)).listen();
-        removeList = new ArrayList<MonitorID>();
-    }
-
-    public HPCPullMonitor(LocalEventPublisher localEventPublisher, AuthenticationInfo authInfo) {
-        connections = new HashMap<String, ResourceConnection>();
-        queue = new LinkedBlockingDeque<UserMonitorData>();
-        publisher = localEventPublisher;
-        authenticationInfo = authInfo;
-        cancelJobList = new LinkedBlockingQueue<String>();
-        this.completedJobsFromPush = new ArrayList<String>();
-        (new SimpleJobFinishConsumer(this.completedJobsFromPush)).listen();
-        removeList = new ArrayList<MonitorID>();
-    }
-
-    public HPCPullMonitor(BlockingQueue<UserMonitorData> queue, LocalEventPublisher publisher) {
-        this.queue = queue;
-        this.publisher = publisher;
-        connections = new HashMap<String, ResourceConnection>();
-        cancelJobList = new LinkedBlockingQueue<String>();
-        this.completedJobsFromPush = new ArrayList<String>();
-        (new SimpleJobFinishConsumer(this.completedJobsFromPush)).listen();
-        removeList = new ArrayList<MonitorID>();
-    }
-
-
-    public void run() {
-        /* implement a logic to pick each monitorID object from the queue and do the
-        monitoring
-         */
-        this.startPulling = true;
-        while (this.startPulling && !ServerSettings.isStopAllThreads()) {
-            try {
-                // After finishing one iteration of the full queue this thread sleeps for 10 seconds
-                synchronized (this.queue) {
-                    if (this.queue.size() > 0) {
-                        startPulling();
-                    }
-                }
-                Thread.sleep(10000);
-            } catch (Exception e) {
-                // We catch all exceptions here because, no matter what happens, we do not stop running this
-                // thread. Proper error reporting is handled in the startPulling method; this catch block only
-                // covers anything that goes wrong in Thread.sleep.
-                logger.error(e.getMessage(),e);
-            }
-        }
-        // thread is going to return so we close all the connections
-        Iterator<String> iterator = connections.keySet().iterator();
-        while (iterator.hasNext()) {
-            String next = iterator.next();
-            ResourceConnection resourceConnection = connections.get(next);
-            try {
-                resourceConnection.getCluster().disconnect();
-            } catch (SSHApiException e) {
-                logger.error("Error while disconnecting from the cluster", e);
-            }
-        }
-    }
-
-    /**
-     * This method is invoked when the PullMonitor needs to start,
-     * and it has to be called at the configured polling frequency.
-     *
-     * @return if the start process is successful return true else false
-     */
-     public boolean startPulling() throws AiravataMonitorException {
-        // take the top element in the queue and pull the data and put that element
-        // at the tail of the queue
-        //todo this polling will not work with multiple usernames, but with a single user
-        // and multiple hosts monitoring currently works
-        UserMonitorData take = null;
-        JobStatusChangeRequestEvent jobStatus = new JobStatusChangeRequestEvent();
-        MonitorID currentMonitorID = null;
-        try {
-            take = this.queue.take();
-            List<HostMonitorData> hostMonitorData = take.getHostMonitorData();
-            for (ListIterator<HostMonitorData> hostIterator = hostMonitorData.listIterator(); hostIterator.hasNext();) {
-                HostMonitorData iHostMonitorData = hostIterator.next();
-                if (iHostMonitorData.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
-                    String hostName = iHostMonitorData.getComputeResourceDescription().getHostName();
-                    ResourceConnection connection = null;
-                    if (connections.containsKey(hostName)) {
-                        if (!connections.get(hostName).isConnected()) {
-                            connection = new ResourceConnection(iHostMonitorData, getAuthenticationInfo());
-                            connections.put(hostName, connection);
-                        } else {
-                            logger.debug("We already have this connection so not going to create one");
-                            connection = connections.get(hostName);
-                        }
-                    } else {
-                        connection = new ResourceConnection(iHostMonitorData, getAuthenticationInfo());
-                        connections.put(hostName, connection);
-                    }
-
-                    // before we get the statuses, we check the cancel job list and remove them permanently
-                    List<MonitorID> monitorID = iHostMonitorData.getMonitorIDs();
-                    Iterator<String> iterator1 = cancelJobList.iterator();
-                    ListIterator<MonitorID> monitorIDListIterator = monitorID.listIterator();
-                    while (monitorIDListIterator.hasNext()) {
-                        MonitorID iMonitorID = monitorIDListIterator.next();
-                        while (iterator1.hasNext()) {
-                            String cancelMId = iterator1.next();
-                            if (cancelMId.equals(iMonitorID.getExperimentID() + "+" + iMonitorID.getTaskID())) {
-                                iMonitorID.setStatus(JobState.CANCELED);
-//                                CommonUtils.removeMonitorFromQueue(take, iMonitorID);
-                                removeList.add(iMonitorID);
-                                logger.debugId(cancelMId, "Found a match in cancel monitor queue, hence moved to the " +
-                                                "completed job queue, experiment {}, task {} , job {}",
-                                        iMonitorID.getExperimentID(), iMonitorID.getTaskID(), iMonitorID.getJobID());
-                                logger.info("Job cancelled: marking the Job as ************CANCELLED************ experiment {}, task {}, job name {} .",
-                                        iMonitorID.getExperimentID(), iMonitorID.getTaskID(), iMonitorID.getJobName());
-                                sendNotification(iMonitorID);
-                                logger.info("To avoid timing issues we sleep for a while and then try to retrieve the output files");
-                                Thread.sleep(10000);
-                                GFacThreadPoolExecutor.getCachedThreadPool().execute(new OutHandlerWorker(gfac, iMonitorID, publisher));
-                                break;
-                            }
-                        }
-                        iterator1 = cancelJobList.iterator();
-                    }
-
-                    cleanup(take);
-
-                    synchronized (completedJobsFromPush) {
-                        for (ListIterator<String> iterator = completedJobsFromPush.listIterator(); iterator.hasNext(); ) {
-                            String completeId = iterator.next();
-                            for (monitorIDListIterator = monitorID.listIterator(); monitorIDListIterator.hasNext(); ) {
-                                MonitorID iMonitorID = monitorIDListIterator.next();
-                                if (completeId.equals(iMonitorID.getUserName() + "," + iMonitorID.getJobName())) {
-                                    logger.info("This job is finished because push notification came with <username,jobName> " + completeId);
-                                    iMonitorID.setStatus(JobState.COMPLETE);
-//                                    CommonUtils.removeMonitorFromQueue(take, iMonitorID);//we have to make this empty everytime we iterate, otherwise this list will accumulate and will lead to a memory leak
-                                    removeList.add(iMonitorID);
-                                    logger.debugId(completeId, "Push notification updated job {} status to {}. " +
-                                                    "experiment {} , task {}.", iMonitorID.getJobID(), JobState.COMPLETE.toString(),
-                                            iMonitorID.getExperimentID(), iMonitorID.getTaskID());
-                                    logger.info("AMQP message received: marking the Job as ************COMPLETE************ experiment {}, task {}, job name {} .",
-                                            iMonitorID.getExperimentID(), iMonitorID.getTaskID(), iMonitorID.getJobName());
-
-                                    sendNotification(iMonitorID);
-                                    logger.info("To avoid timing issues we sleep for a while and then try to retrieve the output files");
-                                    Thread.sleep(10000);
-                                    GFacThreadPoolExecutor.getCachedThreadPool().execute(new OutHandlerWorker(gfac, iMonitorID, publisher));
-                                    break;
-                                }
-                            }
-                        }
-                    }
-
-                    cleanup(take);
-
-                    // we have to get this again because we removed the already completed jobs with amqp messages
-                    monitorID = iHostMonitorData.getMonitorIDs();
-                    Map<String, JobState> jobStatuses = connection.getJobStatuses(monitorID);
-                    for (Iterator<MonitorID> iterator = monitorID.listIterator(); iterator.hasNext(); ) {
-                        MonitorID iMonitorID = iterator.next();
-                        currentMonitorID = iMonitorID;
-                        if (!JobState.CANCELED.equals(iMonitorID.getStatus()) &&
-                                !JobState.COMPLETE.equals(iMonitorID.getStatus())) {
-                            iMonitorID.setStatus(jobStatuses.get(iMonitorID.getJobID() + "," + iMonitorID.getJobName()));    //IMPORTANT this is NOT a simple setter we have a logic
-                        } else if (JobState.COMPLETE.equals(iMonitorID.getStatus())) {
-                            logger.debugId(iMonitorID.getJobID(), "Moved job {} to completed jobs map, experiment {}, " +
-                                    "task {}", iMonitorID.getJobID(), iMonitorID.getExperimentID(), iMonitorID.getTaskID());
-//                            CommonUtils.removeMonitorFromQueue(take, iMonitorID);
-                            removeList.add(iMonitorID);
-                            logger.info("PULL Notification is complete: marking the Job as ************COMPLETE************ experiment {}, task {}, job name {} .",
-                                    iMonitorID.getExperimentID(), iMonitorID.getTaskID(), iMonitorID.getJobName());
-                            GFacThreadPoolExecutor.getCachedThreadPool().execute(new OutHandlerWorker(gfac, iMonitorID, publisher));
-                        }
-                        iMonitorID.setStatus(jobStatuses.get(iMonitorID.getJobID() + "," + iMonitorID.getJobName()));    //IMPORTANT this is not a simple setter we have a logic
-                        iMonitorID.setLastMonitored(new Timestamp((new Date()).getTime()));
-                        sendNotification(iMonitorID);
-                        // if the job is completed we do not have to put the job to the queue again
-                        iMonitorID.setLastMonitored(new Timestamp((new Date()).getTime()));
-                    }
-
-                    cleanup(take);
-
-
-                    for (Iterator<MonitorID> iterator = monitorID.listIterator(); iterator.hasNext(); ) {
-                        MonitorID iMonitorID = iterator.next();
-                        if (iMonitorID.getFailedCount() > FAILED_COUNT) {
-                            iMonitorID.setLastMonitored(new Timestamp((new Date()).getTime()));
-                            String outputDir = iMonitorID.getJobExecutionContext().getOutputDir();
-                            List<String> stdOut = null;
-                            try {
-                                stdOut = connection.getCluster().listDirectory(outputDir); // check the outputs directory
-                            } catch (SSHApiException e) {
-                                if (e.getMessage().contains("No such file or directory")) {
-                                    // this is because while we run output handler something failed and during exception
-                                    // we store all the jobs in the monitor queue again
-                                    logger.error("We know this job has already attempted to run out-handlers");
-//                                    CommonUtils.removeMonitorFromQueue(queue, iMonitorID);
-                                }
-                            }
-                            if (stdOut != null && stdOut.size() > 0 && !stdOut.get(0).isEmpty()) { // have to be careful with this
-                                iMonitorID.setStatus(JobState.COMPLETE);
-                                logger.errorId(iMonitorID.getJobID(), "Job monitoring failed {} times, " +
-                                                " Experiment {} , task {}", iMonitorID.getFailedCount(),
-                                        iMonitorID.getExperimentID(), iMonitorID.getTaskID());
-                                logger.info("Listing directory came as complete: marking the Job as ************COMPLETE************ experiment {}, task {}, job name {} .",
-                                        iMonitorID.getExperimentID(), iMonitorID.getTaskID(), iMonitorID.getJobName());
-                                sendNotification(iMonitorID);
-//                                CommonUtils.removeMonitorFromQueue(take, iMonitorID);
-                                removeList.add(iMonitorID);
-                                GFacThreadPoolExecutor.getCachedThreadPool().execute(new OutHandlerWorker(gfac, iMonitorID, publisher));
-                            } else {
-                                iMonitorID.setFailedCount(0);
-                            }
-                        } else {
-                            // update the last-monitored timestamp on every pass
-                            iMonitorID.setLastMonitored(new Timestamp((new Date()).getTime()));
-                            // if the job is complete we remove it from the Map, if any of these maps
-                            // get empty this userMonitorData will get delete from the queue
-                        }
-                    }
-
-                    cleanup(take);
-
-
-                } else {
-                    logger.debug("Qstat Monitor doesn't handle non-gsissh hosts , host {}", iHostMonitorData.
-                            getComputeResourceDescription().getHostName());
-                }
-            }
-            // We have finished processing all the HostMonitorData objects in userMonitorData, so now it goes
-            // back to the tail of the queue. During individual monitorID removal we remove a HostMonitorData
-            // object once it becomes empty, so if all the jobs of every HostMonitorData object in this
-            // userMonitorData have finished we do not put it back into the queue.
-            for (ListIterator<HostMonitorData> iterator1 = take.getHostMonitorData().listIterator(); iterator1.hasNext(); ) {
-                HostMonitorData iHostMonitorID = iterator1.next();
-                if (iHostMonitorID.getMonitorIDs().size() == 0) {
-                    iterator1.remove();
-                    logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getComputeResourceDescription().getHostName());
-                }
-            }
-            if(take.getHostMonitorData().size()!=0) {
-                queue.put(take);
-            }
-        } catch (InterruptedException e) {
-            if (!this.queue.contains(take)) {
-                try {
-                    this.queue.put(take);
-                } catch (InterruptedException e1) {
-                    e1.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-                }
-            }
-            logger.error("Error handling the job with Job ID:" + currentMonitorID.getJobID());
-            throw new AiravataMonitorException(e);
-        } catch (SSHApiException e) {
-            logger.error(e.getMessage());
-            if (e.getMessage().contains("Unknown Job Id Error")) {
-                // in this case the job is finished, or maybe the given job ID is wrong
-                jobStatus.setState(JobState.UNKNOWN);
-                JobIdentifier jobIdentifier = new JobIdentifier("UNKNOWN", "UNKNOWN", "UNKNOWN", "UNKNOWN", "UNKNOWN");
-                if (currentMonitorID != null){
-                    jobIdentifier.setExperimentId(currentMonitorID.getExperimentID());
-                    jobIdentifier.setTaskId(currentMonitorID.getTaskID());
-                    jobIdentifier.setWorkflowNodeId(currentMonitorID.getWorkflowNodeID());
-                    jobIdentifier.setJobId(currentMonitorID.getJobID());
-                    jobIdentifier.setGatewayId(currentMonitorID.getJobExecutionContext().getGatewayID());
-                }
-                jobStatus.setJobIdentity(jobIdentifier);
-                publisher.publish(jobStatus);
-            } else if (e.getMessage().contains("illegally formed job identifier")) {
-                logger.error("Wrong job ID is given so dropping the job from monitoring system");
-            } else if (!this.queue.contains(take)) {
-                try {
-                    queue.put(take);
-                } catch (InterruptedException e1) {
-                    e1.printStackTrace();
-                }
-            }
-            throw new AiravataMonitorException("Error retrieving the job status", e);
-        } catch (Exception e) {
-            try {
-                queue.put(take);
-            } catch (InterruptedException e1) {
-                e1.printStackTrace();
-            }
-            throw new AiravataMonitorException("Error retrieving the job status", e);
-        }
-        return true;
-    }
-
-    private void sendNotification(MonitorID iMonitorID) {
-        JobStatusChangeRequestEvent jobStatus = new JobStatusChangeRequestEvent();
-        JobIdentifier jobIdentity = new JobIdentifier(iMonitorID.getJobID(),
-                iMonitorID.getTaskID(),
-                iMonitorID.getWorkflowNodeID(),
-                iMonitorID.getExperimentID(),
-                iMonitorID.getJobExecutionContext().getGatewayID());
-        jobStatus.setJobIdentity(jobIdentity);
-        jobStatus.setState(iMonitorID.getStatus());
-        // we have this JobStatus class to handle amqp monitoring
-        logger.debugId(jobStatus.getJobIdentity().getJobId(), "Published job status change request, " +
-                "experiment {} , task {}", jobStatus.getJobIdentity().getExperimentId(),
-        jobStatus.getJobIdentity().getTaskId());
-
-        publisher.publish(jobStatus);
-    }
-
-    /**
-     * This is the method to stop the polling process
-     *
-     * @return if the stopping process is successful return true else false
-     */
-    public boolean stopPulling() {
-        this.startPulling = false;
-        return true;
-    }
-
-    public LocalEventPublisher getPublisher() {
-        return publisher;
-    }
-
-    public void setPublisher(LocalEventPublisher publisher) {
-        this.publisher = publisher;
-    }
-
-    public BlockingQueue<UserMonitorData> getQueue() {
-        return queue;
-    }
-
-    public void setQueue(BlockingQueue<UserMonitorData> queue) {
-        this.queue = queue;
-    }
-
-    public boolean authenticate() {
-        return false;  //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    public Map<String, ResourceConnection> getConnections() {
-        return connections;
-    }
-
-    public boolean isStartPulling() {
-        return startPulling;
-    }
-
-    public void setConnections(Map<String, ResourceConnection> connections) {
-        this.connections = connections;
-    }
-
-    public void setStartPulling(boolean startPulling) {
-        this.startPulling = startPulling;
-    }
-
-    public GFac getGfac() {
-        return gfac;
-    }
-
-    public void setGfac(GFac gfac) {
-        this.gfac = gfac;
-    }
-
-    public AuthenticationInfo getAuthenticationInfo() {
-        return authenticationInfo;
-    }
-
-    public void setAuthenticationInfo(AuthenticationInfo authenticationInfo) {
-        this.authenticationInfo = authenticationInfo;
-    }
-
-    public LinkedBlockingQueue<String> getCancelJobList() {
-        return cancelJobList;
-    }
-
-    public void setCancelJobList(LinkedBlockingQueue<String> cancelJobList) {
-        this.cancelJobList = cancelJobList;
-    }
-
-
-    private void cleanup(UserMonitorData userMonitorData){
-        for(MonitorID iMonitorId:removeList){
-            try {
-                CommonUtils.removeMonitorFromQueue(userMonitorData, iMonitorId);
-            } catch (AiravataMonitorException e) {
-                logger.error(e.getMessage(), e);
-                logger.error("Error deleting the monitor data: " + iMonitorId.getJobID());
-            }
-        }
-        removeList.clear();
-    }
-}
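
For orientation, a hedged sketch of how the pull monitor above was typically wired up: build it with a publisher and
the shared monitoring credentials, attach the GFac instance, and run it on its own thread so startPulling() cycles
the queue roughly every ten seconds. The EventBus-backed LocalEventPublisher mirrors what the no-argument constructor
above does; the GFac instance and AuthenticationInfo are assumed to come from the caller.

import com.google.common.eventbus.EventBus;
import org.apache.airavata.common.utils.LocalEventPublisher;
import org.apache.airavata.gfac.core.GFac;
import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
import org.apache.airavata.gfac.monitor.impl.pull.qstat.HPCPullMonitor;

public class PullMonitorWiringExample {

    public static HPCPullMonitor startPullMonitor(GFac gfac, AuthenticationInfo authInfo) {
        LocalEventPublisher publisher = new LocalEventPublisher(new EventBus());
        HPCPullMonitor monitor = new HPCPullMonitor(publisher, authInfo);
        monitor.setGfac(gfac);

        // run() keeps polling until stopPulling() is called or the server stops all threads.
        Thread monitorThread = new Thread(monitor::run, "hpc-pull-monitor");
        monitorThread.setDaemon(true);
        monitorThread.start();
        return monitor;
    }
}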

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
deleted file mode 100644
index 26add1f..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.impl.pull.qstat;
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.SecurityContext;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gfac.monitor.HostMonitorData;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.JobStatus;
-import org.apache.airavata.model.experiment.JobState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-
-public class ResourceConnection {
-    private static final Logger log = LoggerFactory.getLogger(ResourceConnection.class);
-
-    private HPCRemoteCluster cluster;
-
-    private AuthenticationInfo authenticationInfo;
-
-
-    public ResourceConnection(HostMonitorData hostMonitorData,AuthenticationInfo authInfo) throws SSHApiException {
-        MonitorID monitorID = hostMonitorData.getMonitorIDs().get(0);
-        try {
-            SecurityContext securityContext = monitorID.getJobExecutionContext().getSecurityContext(monitorID.getComputeResourceDescription().getHostName());
-            if(securityContext != null) {
-                if (securityContext instanceof GSISecurityContext) {
-                    GSISecurityContext gsiSecurityContext = (GSISecurityContext) securityContext;
-                    cluster = (HPCRemoteCluster) gsiSecurityContext.getRemoteCluster();
-                } else if (securityContext instanceof  SSHSecurityContext) {
-                    SSHSecurityContext sshSecurityContext = (SSHSecurityContext)
-                            securityContext;
-                    cluster = (HPCRemoteCluster) sshSecurityContext.getRemoteCluster();
-                }
-            }
-            // We just use the cluster configuration from the incoming request and construct a new cluster, because
-            // for monitoring we use our own credentials rather than one user's account to do everything.
-            authenticationInfo = authInfo;
-        } catch (GFacException e) {
-            log.error("Error reading data from job ExecutionContext");
-        }
-    }
-
-    public ResourceConnection(HostMonitorData hostMonitorData) throws SSHApiException {
-        MonitorID monitorID = hostMonitorData.getMonitorIDs().get(0);
-        try {
-            GSISecurityContext securityContext = (GSISecurityContext)
-                    monitorID.getJobExecutionContext().getSecurityContext(monitorID.getComputeResourceDescription().getHostName());
-            cluster = (HPCRemoteCluster) securityContext.getRemoteCluster();
-
-            // We just use the cluster configuration from the incoming request and construct a new cluster, because
-            // for monitoring we use our own credentials rather than one user's account to do everything.
-            cluster = new HPCRemoteCluster(cluster.getServerInfo(), authenticationInfo, cluster.getJobManagerConfiguration());
-        } catch (GFacException e) {
-            log.error("Error reading data from job ExecutionContext");
-        }
-    }
-
-    public JobState getJobStatus(MonitorID monitorID) throws SSHApiException {
-        String jobID = monitorID.getJobID();
-        //todo currently we execute qstat for each job, but we could use user-based monitoring,
-        //todo or concatenate all the commands, execute them in one go, and parse the response
-        return getStatusFromString(cluster.getJobStatus(jobID).toString());
-    }
-
-    public Map<String, JobState> getJobStatuses(List<MonitorID> monitorIDs) throws SSHApiException {
-        Map<String, JobStatus> treeMap = new TreeMap<String, JobStatus>();
-        Map<String, JobState> treeMap1 = new TreeMap<String, JobState>();
-        // creating a sorted map with all the jobIds and with the predefined
-        // status as UNKNOWN
-        for (MonitorID monitorID : monitorIDs) {
-            treeMap.put(monitorID.getJobID()+","+monitorID.getJobName(), JobStatus.U);
-        }
-        String userName = cluster.getServerInfo().getUserName();
-        //todo currently we execute qstat for each job, but we could use user-based monitoring,
-        //todo or concatenate all the commands, execute them in one go, and parse the response
-        //
-        cluster.getJobStatuses(userName, treeMap);
-        for (String key : treeMap.keySet()) {
-            treeMap1.put(key, getStatusFromString(treeMap.get(key).toString()));
-        }
-        return treeMap1;
-    }
-
-    private JobState getStatusFromString(String status) {
-        log.info("parsing the job status returned : " + status);
-        if (status != null) {
-            if ("C".equals(status) || "CD".equals(status) || "E".equals(status) || "CG".equals(status) || "DONE".equals(status)) {
-                return JobState.COMPLETE;
-            } else if ("H".equals(status) || "h".equals(status)) {
-                return JobState.HELD;
-            } else if ("Q".equals(status) || "qw".equals(status) || "PEND".equals(status)) {
-                return JobState.QUEUED;
-            } else if ("R".equals(status) || "CF".equals(status) || "r".equals(status) || "RUN".equals(status)) {
-                return JobState.ACTIVE;
-            } else if ("T".equals(status)) {
-                return JobState.HELD;
-            } else if ("W".equals(status) || "PD".equals(status)) {
-                return JobState.QUEUED;
-            } else if ("S".equals(status) || "PSUSP".equals(status) || "USUSP".equals(status) || "SSUSP".equals(status)) {
-                return JobState.SUSPENDED;
-            } else if ("CA".equals(status)) {
-                return JobState.CANCELED;
-            } else if ("F".equals(status) || "NF".equals(status) || "TO".equals(status) || "EXIT".equals(status)) {
-                return JobState.FAILED;
-            } else if ("PR".equals(status) || "Er".equals(status)) {
-                return JobState.FAILED;
-            } else if ("U".equals(status) || ("UNKWN".equals(status))) {
-                return JobState.UNKNOWN;
-            }
-        }
-        return JobState.UNKNOWN;
-    }
-
-    public HPCRemoteCluster getCluster() {
-        return cluster;
-    }
-
-    public void setCluster(HPCRemoteCluster cluster) {
-        this.cluster = cluster;
-    }
-
-    public boolean isConnected(){
-        return this.cluster.getSession().isConnected();
-    }
-}
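
For reference, a minimal sketch of the scheduler-code-to-state mapping that getStatusFromString performs above, expressed as a lookup table instead of an if/else chain. The JobState enum here is a local stand-in for the Airavata model class, and the codes mirror the deleted method.

import java.util.HashMap;
import java.util.Map;

public class StatusCodeTable {
    // stand-in for org.apache.airavata.model.experiment.JobState
    enum JobState { COMPLETE, HELD, QUEUED, ACTIVE, SUSPENDED, CANCELED, FAILED, UNKNOWN }

    private static final Map<String, JobState> TABLE = new HashMap<>();
    static {
        for (String s : new String[]{"C", "CD", "E", "CG", "DONE"})          TABLE.put(s, JobState.COMPLETE);
        for (String s : new String[]{"H", "h", "T"})                         TABLE.put(s, JobState.HELD);
        for (String s : new String[]{"Q", "qw", "PEND", "W", "PD"})          TABLE.put(s, JobState.QUEUED);
        for (String s : new String[]{"R", "CF", "r", "RUN"})                 TABLE.put(s, JobState.ACTIVE);
        for (String s : new String[]{"S", "PSUSP", "USUSP", "SSUSP"})        TABLE.put(s, JobState.SUSPENDED);
        TABLE.put("CA", JobState.CANCELED);
        for (String s : new String[]{"F", "NF", "TO", "EXIT", "PR", "Er"})   TABLE.put(s, JobState.FAILED);
    }

    public static JobState fromCode(String code) {
        // unmapped or null codes fall back to UNKNOWN, matching the original behaviour
        return code == null ? JobState.UNKNOWN : TABLE.getOrDefault(code, JobState.UNKNOWN);
    }

    public static void main(String[] args) {
        System.out.println(fromCode("qw"));   // QUEUED (SGE)
        System.out.println(fromCode("RUN"));  // ACTIVE (LSF)
        System.out.println(fromCode("CA"));   // CANCELED (SLURM)
    }
}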

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
deleted file mode 100644
index a946075..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.impl.push.amqp;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.BlockingQueue;
-
-import org.apache.airavata.common.utils.LocalEventPublisher;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.monitor.core.PushMonitor;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.apache.airavata.gfac.monitor.util.AMQPConnectionUtil;
-import org.apache.airavata.gfac.monitor.util.CommonUtils;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.airavata.model.messaging.event.JobIdentifier;
-import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-import org.apache.airavata.model.experiment.JobState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.eventbus.EventBus;
-import com.google.common.eventbus.Subscribe;
-import com.rabbitmq.client.Channel;
-import com.rabbitmq.client.Connection;
-
-/**
- * This is the AMQP-based push monitor implementation; it uses the
- * RabbitMQ client to receive AMQP-based monitoring data, mostly
- * from XSEDE resources.
- */
-public class AMQPMonitor extends PushMonitor {
-    private final static Logger logger = LoggerFactory.getLogger(AMQPMonitor.class);
-
-
-    /* this will keep all the channels available in the system, we do not create
-      channels for all the jobs submitted, but we create channels for each user for each
-      host.
-    */
-    private Map<String, Channel> availableChannels;
-
-    private LocalEventPublisher publisher;
-
-    private LocalEventPublisher localPublisher;
-
-    private BlockingQueue<MonitorID> runningQueue;
-
-    private BlockingQueue<MonitorID> finishQueue;
-
-    private String connectionName;
-
-    private String proxyPath;
-
-    private List<String> amqpHosts;
-
-    private boolean startRegister;
-
-    public AMQPMonitor(){
-
-    }
-    public AMQPMonitor(LocalEventPublisher publisher, BlockingQueue<MonitorID> runningQueue,
-                       BlockingQueue<MonitorID> finishQueue,
-                       String proxyPath,String connectionName,List<String> hosts) {
-        this.publisher = publisher;
-        this.runningQueue = runningQueue;        // these will be initialized by the MonitorManager
-        this.finishQueue = finishQueue;          // these will be initialized by the MonitorManager
-        this.availableChannels = new HashMap<String, Channel>();
-        this.connectionName = connectionName;
-        this.proxyPath = proxyPath;
-        this.amqpHosts = hosts;
-        this.localPublisher = new LocalEventPublisher(new EventBus());
-        this.localPublisher.registerListener(this);
-    }
-
-    public void initialize(String proxyPath, String connectionName, List<String> hosts) {
-        this.availableChannels = new HashMap<String, Channel>();
-        this.connectionName = connectionName;
-        this.proxyPath = proxyPath;
-        this.amqpHosts = hosts;
-        this.localPublisher = new LocalEventPublisher(new EventBus());
-        this.localPublisher.registerListener(this);
-    }
-
-    @Override
-    public boolean registerListener(MonitorID monitorID) throws AiravataMonitorException {
-        // we subscribe to read user-host based subscription
-        ComputeResourceDescription computeResourceDescription = monitorID.getComputeResourceDescription();
-        if (computeResourceDescription.isSetIpAddresses() && computeResourceDescription.getIpAddresses().size() > 0) {
-            // we get first ip address for the moment
-            String hostAddress = computeResourceDescription.getIpAddresses().get(0);
-            // in the AMQP case there are not multiple jobs per host, because once a job is put into the queue it
-            // will be picked up by the monitor, so jobs do not stay in this queue but they do stay in finishQueue
-            String channelID = CommonUtils.getChannelID(monitorID);
-            if (availableChannels.get(channelID) == null) {
-                try {
-                    // TODO: fix this rather than reading it from a file
-                    Connection connection = AMQPConnectionUtil.connect(amqpHosts, connectionName, proxyPath);
-                    Channel channel = connection.createChannel();
-                    availableChannels.put(channelID, channel);
-                    String queueName = channel.queueDeclare().getQueue();
-
-                    BasicConsumer consumer = new
-                            BasicConsumer(new JSONMessageParser(), localPublisher);          // here we use local publisher
-                    channel.basicConsume(queueName, true, consumer);
-                    String filterString = CommonUtils.getRoutingKey(monitorID.getUserName(), hostAddress);
-                    // bind the queue to a particular user on a particular machine
-                    channel.queueBind(queueName, "glue2.computing_activity", filterString);
-                    logger.info("Using filtering string to monitor: " + filterString);
-                } catch (IOException e) {
-                    logger.error("Error creating the connection to monitor the job of user: " + monitorID.getUserName());
-                }
-            }
-        } else {
-            throw new AiravataMonitorException("Couldn't register monitor for jobId: " + monitorID.getJobID() +
-                    ", ComputeResourceDescription " + computeResourceDescription.getHostName() + " doesn't have an " +
-                    "IP address associated with it");
-        }
-        return true;
-    }
-
-    public void run() {
-        // before going into the while-true loop we enable registration
-        startRegister = true; // this will be unset by another thread
-        while (startRegister || !ServerSettings.isStopAllThreads()) {
-            try {
-                MonitorID take = runningQueue.take();
-                this.registerListener(take);
-            } catch (AiravataMonitorException e) { // catch any exception inside the loop
-                logger.error(e.getMessage(), e);
-            } catch (InterruptedException e) {
-                logger.error(e.getMessage(), e);
-            } catch (Exception e){
-                logger.error(e.getMessage(), e);
-            }
-        }
-        Set<String> strings = availableChannels.keySet();
-        for(String key:strings) {
-            Channel channel = availableChannels.get(key);
-            try {
-                channel.close();
-            } catch (IOException e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-    }
-
-    @Subscribe
-    public boolean unRegisterListener(MonitorID monitorID) throws AiravataMonitorException {
-        Iterator<MonitorID> iterator = finishQueue.iterator();
-        MonitorID next = null;
-        while(iterator.hasNext()){
-            next = iterator.next();
-            if(next.getJobID().endsWith(monitorID.getJobID())){
-                break;
-            }
-        }
-        if(next == null) {
-            logger.error("Job has been removed from the queue; obsolete message received");
-            return false;
-        }
-        String channelID = CommonUtils.getChannelID(next);
-        if (JobState.FAILED.equals(monitorID.getStatus()) || JobState.COMPLETE.equals(monitorID.getStatus())) {
-            finishQueue.remove(next);
-
-            // if this is the last job in the queue at this point with the same username and same host we
-            // close the channel and close the connection and remove it from availableChannels
-            if (CommonUtils.isTheLastJobInQueue(finishQueue, next)) {
-                logger.info("There are no jobs to monitor for common ChannelID: " + channelID + ", so we unsubscribe it;" +
-                        " in case a new job is created we subscribe again");
-                Channel channel = availableChannels.get(channelID);
-                if (channel == null) {
-                    logger.error("Already Unregistered the listener");
-                    throw new AiravataMonitorException("Already Unregistered the listener");
-                } else {
-                    try {
-                        channel.queueUnbind(channel.queueDeclare().getQueue(), "glue2.computing_activity", CommonUtils.getRoutingKey(next));
-                        channel.close();
-                        channel.getConnection().close();
-                        availableChannels.remove(channelID);
-                    } catch (IOException e) {
-                        logger.error("Error unregistering the listener");
-                        throw new AiravataMonitorException("Error unregistering the listener");
-                    }
-                }
-            }
-        }
-        next.setStatus(monitorID.getStatus());
-        JobIdentifier jobIdentity = new JobIdentifier(next.getJobID(),
-                                                     next.getTaskID(),
-                                                     next.getWorkflowNodeID(),
-                                                     next.getExperimentID(),
-                                                     next.getJobExecutionContext().getGatewayID());
-        publisher.publish(new JobStatusChangeEvent(next.getStatus(),jobIdentity));
-        return true;
-    }
-    @Override
-    public boolean stopRegister() throws AiravataMonitorException {
-        return false;
-    }
-
-    public Map<String, Channel> getAvailableChannels() {
-        return availableChannels;
-    }
-
-    public void setAvailableChannels(Map<String, Channel> availableChannels) {
-        this.availableChannels = availableChannels;
-    }
-
-    public LocalEventPublisher getPublisher() {
-        return publisher;
-    }
-
-    public void setPublisher(LocalEventPublisher publisher) {
-        this.publisher = publisher;
-    }
-
-    public BlockingQueue<MonitorID> getRunningQueue() {
-        return runningQueue;
-    }
-
-    public void setRunningQueue(BlockingQueue<MonitorID> runningQueue) {
-        this.runningQueue = runningQueue;
-    }
-
-    public BlockingQueue<MonitorID> getFinishQueue() {
-        return finishQueue;
-    }
-
-    public void setFinishQueue(BlockingQueue<MonitorID> finishQueue) {
-        this.finishQueue = finishQueue;
-    }
-
-    public String getProxyPath() {
-        return proxyPath;
-    }
-
-    public void setProxyPath(String proxyPath) {
-        this.proxyPath = proxyPath;
-    }
-
-    public List<String> getAmqpHosts() {
-        return amqpHosts;
-    }
-
-    public void setAmqpHosts(List<String> amqpHosts) {
-        this.amqpHosts = amqpHosts;
-    }
-
-    public boolean isStartRegister() {
-        return startRegister;
-    }
-
-    public void setStartRegister(boolean startRegister) {
-        this.startRegister = startRegister;
-    }
-}
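
A minimal standalone sketch of the subscription pattern AMQPMonitor sets up: an exclusive, server-named queue bound to the "glue2.computing_activity" exchange with the "*.{user}.{host}" routing key produced by CommonUtils.getRoutingKey. The broker URI, user name, and host below are placeholders, and the exchange is assumed to already exist on the broker.

import com.rabbitmq.client.*;

public class UserHostSubscription {
    public static void main(String[] args) throws Exception {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setUri("amqp://localhost");                   // placeholder broker
        try (Connection connection = factory.newConnection()) {
            Channel channel = connection.createChannel();
            String queue = channel.queueDeclare().getQueue(); // server-named, exclusive queue
            String routingKey = "*." + "someUser" + "." + "gw68.example.org"; // placeholder user/host
            channel.queueBind(queue, "glue2.computing_activity", routingKey);
            channel.basicConsume(queue, true, new DefaultConsumer(channel) {
                @Override
                public void handleDelivery(String tag, Envelope env,
                                           AMQP.BasicProperties props, byte[] body) {
                    // print each monitoring message as it arrives
                    System.out.println(env.getRoutingKey() + " -> " + new String(body));
                }
            });
            Thread.sleep(60_000);                             // keep the consumer alive briefly
        }
    }
}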

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/BasicConsumer.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/BasicConsumer.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/BasicConsumer.java
deleted file mode 100644
index 4247524..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/BasicConsumer.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.impl.push.amqp;
-
-import org.apache.airavata.common.utils.LocalEventPublisher;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.monitor.core.MessageParser;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.rabbitmq.client.AMQP;
-import com.rabbitmq.client.Consumer;
-import com.rabbitmq.client.Envelope;
-import com.rabbitmq.client.ShutdownSignalException;
-
-public class BasicConsumer implements Consumer {
-    private final static Logger logger = LoggerFactory.getLogger(BasicConsumer.class);
-
-    private MessageParser parser;
-
-    private LocalEventPublisher publisher;
-
-    public BasicConsumer(MessageParser parser, LocalEventPublisher publisher) {
-        this.parser = parser;
-        this.publisher = publisher;
-    }
-
-    public void handleCancel(String consumerTag) {
-    }
-
-    public void handleCancelOk(String consumerTag) {
-    }
-
-    public void handleConsumeOk(String consumerTag) {
-    }
-
-    public void handleDelivery(String consumerTag,
-                               Envelope envelope,
-                               AMQP.BasicProperties properties,
-                               byte[] body) {
-
-        logger.debug("job update for: " + envelope.getRoutingKey());
-        String message = new String(body);
-        message = message.replaceAll("(?m)^", "    ");
-        // Here we parse the message, extract the job status, and publish it
-        // to the event bus; it will be picked up by
-        // AiravataJobStatusUpdator and stored in the registry
-
-        logger.debug("************************************************************");
-        logger.debug("AMQP message received\n" + message);
-        logger.debug("************************************************************");
-        try {
-            String jobID = envelope.getRoutingKey().split("\\.")[0];
-            MonitorID monitorID = new MonitorID(null, jobID, null, null, null, null,null);
-            monitorID.setStatus(parser.parseMessage(message));
-            publisher.publish(monitorID);
-        } catch (AiravataMonitorException e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    public void handleRecoverOk(String consumerTag) {
-    }
-
-    public void handleShutdownSignal(String consumerTag, ShutdownSignalException sig) {
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/ComputingActivity.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/ComputingActivity.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/ComputingActivity.java
deleted file mode 100644
index 5a36b4a..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/ComputingActivity.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.airavata.gfac.monitor.impl.push.amqp;
-
-import java.util.List;
-
-/**
- * Created by syodage on 6/3/15.
- */
-public class ComputingActivity {
-    String idFromEndpoint;
-    private List<String> state;
-
-    public String getIDFromEndpoint() {
-        return idFromEndpoint;
-    }
-
-    public List<String> getState() {
-        return state;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/JSONMessageParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/JSONMessageParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/JSONMessageParser.java
deleted file mode 100644
index 79cc417..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/JSONMessageParser.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.impl.push.amqp;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.airavata.gfac.monitor.core.MessageParser;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.apache.airavata.model.experiment.JobState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.List;
-
-public class JSONMessageParser implements MessageParser {
-    private final static Logger logger = LoggerFactory.getLogger(JSONMessageParser.class);
-
-    public JobState parseMessage(String message)throws AiravataMonitorException {
-        /*todo write a json message parser here*/
-        logger.debug(message);
-        ObjectMapper objectMapper = new ObjectMapper();
-        try {
-            ComputingActivity computingActivity = objectMapper.readValue(message.getBytes(), ComputingActivity.class);
-            logger.info(computingActivity.getIDFromEndpoint());
-            List<String> stateList = computingActivity.getState();
-            JobState jobState = null;
-            for (String aState : stateList) {
-                jobState = getStatusFromString(aState);
-            }
-            // we get the last value of the state array
-            return jobState;
-        } catch (IOException e) {
-            throw new AiravataMonitorException(e);
-        }
-    }
-
-    private JobState getStatusFromString(String status) {
-        logger.info("Parsing the returned job status: " + status);
-        if (status != null) {
-            if ("ipf:finished".equals(status)) {
-                return JobState.COMPLETE;
-            } else if ("ipf:pending".equals(status) || "ipf:starting".equals(status)) {
-                return JobState.QUEUED;
-            } else if ("ipf:running".equals(status) || "ipf:finishing".equals(status)) {
-                return JobState.ACTIVE;
-            } else if ("ipf:held".equals(status) || "ipf:terminating".equals(status) || "ipf:terminated".equals(status)) {
-                return JobState.HELD;
-            } else if ("ipf:suspending".equals(status)) {
-                return JobState.SUSPENDED;
-            } else if ("ipf:failed".equals(status)) {
-                return JobState.FAILED;
-            } else if ("ipf:unknown".equals(status)) {
-                return JobState.UNKNOWN;
-            }
-        }
-        return JobState.UNKNOWN;
-    }
-
-}
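
A minimal sketch of pulling the state list out of an activity message with Jackson's tree model rather than binding to ComputingActivity. The "State" field name and sample payload are illustrative only and may not match the real GLUE2/IPF schema.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class StateExtractor {
    public static void main(String[] args) throws Exception {
        String message = "{\"State\": [\"ipf:starting\", \"ipf:running\"]}"; // illustrative payload
        JsonNode root = new ObjectMapper().readTree(message);
        String last = null;
        for (JsonNode state : root.path("State")) {
            last = state.asText();            // keep the last entry, as JSONMessageParser does
        }
        System.out.println(last);             // ipf:running
    }
}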

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/SimpleJobFinishConsumer.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/SimpleJobFinishConsumer.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/SimpleJobFinishConsumer.java
deleted file mode 100644
index c4275f1..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/SimpleJobFinishConsumer.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.impl.push.amqp;
-
-import com.rabbitmq.client.Channel;
-import com.rabbitmq.client.Connection;
-import com.rabbitmq.client.ConnectionFactory;
-import com.rabbitmq.client.QueueingConsumer;
-import org.apache.airavata.common.utils.Constants;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-
-public class SimpleJobFinishConsumer {
-    private final static Logger logger = LoggerFactory.getLogger(SimpleJobFinishConsumer.class);
-
-    private List<String> completedJobsFromPush;
-
-    public SimpleJobFinishConsumer(List<String> completedJobsFromPush) {
-        this.completedJobsFromPush = completedJobsFromPush;
-    }
-
-    public void listen() {
-        try {
-            String queueName = ServerSettings.getSetting(Constants.GFAC_SERVER_PORT, "8950");
-            String uri = "amqp://localhost";
-
-            ConnectionFactory connFactory = new ConnectionFactory();
-            connFactory.setUri(uri);
-            Connection conn = connFactory.newConnection();
-            logger.info("--------Created the connection to Rabbitmq server successfully-------");
-
-            final Channel ch = conn.createChannel();
-
-            logger.info("--------Created the channel with Rabbitmq server successfully-------");
-
-            ch.queueDeclare(queueName, false, false, false, null);
-
-            logger.info("--------Declared the queue " + queueName + " in Rabbitmq server successfully-------");
-
-            final QueueingConsumer consumer = new QueueingConsumer(ch);
-            ch.basicConsume(queueName, consumer);
-            (new Thread() {
-                public void run() {
-                    try {
-                        while (true) {
-                            QueueingConsumer.Delivery delivery = consumer.nextDelivery();
-                            String message = new String(delivery.getBody());
-                            logger.info("---------------- Job Finish message received:" + message + " --------------");
-                            synchronized (completedJobsFromPush) {
-                                completedJobsFromPush.add(message);
-                            }
-                            ch.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
-                        }
-                    } catch (Exception ex) {
-                        logger.error("--------Cannot connect to a RabbitMQ Server--------" , ex);
-                    }
-                }
-
-            }).start();
-        } catch (Exception ex) {
-            logger.error("Cannot connect to a RabbitMQ Server: " , ex);
-            logger.info("------------- Push monitoring for HPC jobs is disabled -------------");
-        }
-    }
-}
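
For reference, a sketch of the producer side that would feed SimpleJobFinishConsumer: publish a job-finish message to the same locally declared queue. The queue name below simply reuses the "8950" default from the code above, and the message body is a placeholder.

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

public class SimpleJobFinishPublisher {
    public static void main(String[] args) throws Exception {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setUri("amqp://localhost");
        try (Connection connection = factory.newConnection()) {
            Channel channel = connection.createChannel();
            String queueName = "8950";        // placeholder; matches the default used by the consumer
            // declare with the same flags as the consumer so the declarations are compatible
            channel.queueDeclare(queueName, false, false, false, null);
            channel.basicPublish("", queueName, null, "job-1234 finished".getBytes());
        }
    }
}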

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/UnRegisterWorker.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/UnRegisterWorker.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/UnRegisterWorker.java
deleted file mode 100644
index 41fd096..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/UnRegisterWorker.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.impl.push.amqp;
-
-import com.google.common.eventbus.Subscribe;
-import com.rabbitmq.client.Channel;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.apache.airavata.gfac.monitor.util.CommonUtils;
-import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-import org.apache.airavata.model.experiment.JobState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Map;
-
-public class UnRegisterWorker{
-    private final static Logger logger = LoggerFactory.getLogger(UnRegisterWorker.class);
-    private Map<String, Channel> availableChannels;
-
-    public UnRegisterWorker(Map<String, Channel> channels) {
-        this.availableChannels = channels;
-    }
-
-    @Subscribe
-    private boolean unRegisterListener(JobStatusChangeEvent jobStatus, MonitorID monitorID) throws AiravataMonitorException {
-        String channelID = CommonUtils.getChannelID(monitorID);
-        if (JobState.FAILED.equals(jobStatus.getState()) || JobState.COMPLETE.equals(jobStatus.getState())){
-            Channel channel = availableChannels.get(channelID);
-            if (channel == null) {
-                logger.error("Already Unregistered the listener");
-                throw new AiravataMonitorException("Already Unregistered the listener");
-            } else {
-                try {
-                    channel.queueUnbind(channel.queueDeclare().getQueue(), "glue2.computing_activity", CommonUtils.getRoutingKey(monitorID));
-                    channel.close();
-                    channel.getConnection().close();
-                    availableChannels.remove(channelID);
-                } catch (IOException e) {
-                    logger.error("Error unregistering the listener");
-                    throw new AiravataMonitorException("Error unregistering the listener");
-                }
-            }
-        }
-        return true;
-    }
-}
-
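
For context, Guava's EventBus dispatches events to single-argument methods annotated with @Subscribe. A minimal, self-contained sketch of that pattern, with a hypothetical event type standing in for the status events used above:

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

public class EventBusSketch {
    static class JobStatusChanged {                 // hypothetical event type
        final String jobId;
        final String state;
        JobStatusChanged(String jobId, String state) { this.jobId = jobId; this.state = state; }
    }

    static class StatusListener {
        @Subscribe
        public void onStatusChange(JobStatusChanged event) {  // one parameter per handler
            System.out.println(event.jobId + " -> " + event.state);
        }
    }

    public static void main(String[] args) {
        EventBus bus = new EventBus();
        bus.register(new StatusListener());
        bus.post(new JobStatusChanged("1234", "COMPLETE"));
    }
}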

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/AMQPConnectionUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/AMQPConnectionUtil.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/AMQPConnectionUtil.java
deleted file mode 100644
index 6a4ed3b..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/AMQPConnectionUtil.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.util;
-
-import com.rabbitmq.client.Connection;
-import com.rabbitmq.client.ConnectionFactory;
-import com.rabbitmq.client.DefaultSaslConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.TrustManagerFactory;
-import java.security.KeyStore;
-import java.util.Collections;
-import java.util.List;
-
-public class AMQPConnectionUtil {
-    private final static Logger logger = LoggerFactory.getLogger(AMQPConnectionUtil.class);
-    public static Connection connect(List<String> hosts, String vhost, String proxyFile) {
-        Collections.shuffle(hosts);
-        for (String host : hosts) {
-            Connection connection = connect(host, vhost, proxyFile);
-            if (connection != null) {
-                logger.info("connected to " + host);
-                return connection;
-            }
-        }
-        return null;
-    }
-
-    public static Connection connect(String host, String vhost, String proxyFile) {
-        Connection connection;
-        try {
-            String keyPassPhrase = "test123";
-            KeyStore ks = X509Helper.keyStoreFromPEM(proxyFile, keyPassPhrase);
-            KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
-            kmf.init(ks, keyPassPhrase.toCharArray());
-
-            KeyStore tks = X509Helper.trustKeyStoreFromCertDir();
-            TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
-            tmf.init(tks);
-
-            SSLContext c = SSLContext.getInstance("SSLv3");
-            c.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
-
-            ConnectionFactory factory = new ConnectionFactory();
-            factory.setHost(host);
-            factory.setPort(5671);
-            factory.useSslProtocol(c);
-            factory.setVirtualHost(vhost);
-            factory.setSaslConfig(DefaultSaslConfig.EXTERNAL);
-
-            connection = factory.newConnection();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            return null;
-        }
-        return connection;
-    }
-
-}
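
Hypothetical usage of the helper above, assuming AMQPConnectionUtil is on the classpath: try a shuffled list of brokers over TLS and keep the first successful connection. The broker host names, virtual host, and proxy file path are placeholders.

import com.rabbitmq.client.Connection;
import java.util.Arrays;

public class AmqpConnectExample {
    public static void main(String[] args) {
        Connection connection = AMQPConnectionUtil.connect(
                Arrays.asList("info1.example.org", "info2.example.org"), // placeholder brokers
                "xsede",                      // virtual host (placeholder)
                "/tmp/x509up_u1000");         // proxy credential file (placeholder)
        if (connection != null) {
            System.out.println("Connected to " + connection.getAddress());
        }
    }
}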

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
deleted file mode 100644
index a0b922d..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.util;
-
-import org.apache.airavata.common.logger.AiravataLogger;
-import org.apache.airavata.common.logger.AiravataLoggerFactory;
-import org.apache.airavata.common.utils.Constants;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.GFacHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerConfig;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.monitor.HostMonitorData;
-import org.apache.airavata.gfac.monitor.UserMonitorData;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.ZooDefs;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.BlockingQueue;
-
-public class CommonUtils {
-    private final static AiravataLogger logger = AiravataLoggerFactory.getLogger(CommonUtils.class);
-
-    public static String getChannelID(MonitorID monitorID) {
-        return monitorID.getUserName() + "-" + monitorID.getComputeResourceDescription().getHostName();
-    }
-
-    public static String getRoutingKey(MonitorID monitorID) {
-        return "*." + monitorID.getUserName() + "." + monitorID.getComputeResourceDescription().getIpAddresses().get(0);
-    }
-
-    public static String getChannelID(String userName,String hostAddress) {
-        return userName + "-" + hostAddress;
-    }
-
-    public static String getRoutingKey(String userName,String hostAddress) {
-        return "*." + userName + "." + hostAddress;
-    }
-
-    public static void addMonitortoQueue(BlockingQueue<UserMonitorData> queue, MonitorID monitorID, JobExecutionContext jobExecutionContext) throws AiravataMonitorException {
-        synchronized (queue) {
-            Iterator<UserMonitorData> iterator = queue.iterator();
-            while (iterator.hasNext()) {
-                UserMonitorData next = iterator.next();
-                if (next.getUserName().equals(monitorID.getUserName())) {
-                    // then this is the right place to update
-                    List<HostMonitorData> monitorIDs = next.getHostMonitorData();
-                    for (HostMonitorData host : monitorIDs) {
-                        if (isEqual(host.getComputeResourceDescription(), monitorID.getComputeResourceDescription())) {
-                            // ok we found right place to add this monitorID
-                            host.addMonitorIDForHost(monitorID);
-                            logger.debugId(monitorID.getJobID(), "Added new job to the monitoring queue, experiment {}," +
-                                    " task {}", monitorID.getExperimentID(), monitorID.getTaskID());
-                            return;
-                        }
-                    }
-                    // there is a UserMonitorData object for this user name but no HostMonitorData for this host,
-                    // so we have to create a new one
-                    HostMonitorData hostMonitorData = new HostMonitorData(jobExecutionContext);
-                    hostMonitorData.addMonitorIDForHost(monitorID);
-                    next.addHostMonitorData(hostMonitorData);
-                    logger.debugId(monitorID.getJobID(), "Added new job to the monitoring queue, experiment {}," +
-                            " task {}", monitorID.getExperimentID(), monitorID.getTaskID());
-                    return;
-                }
-            }
-            HostMonitorData hostMonitorData = new HostMonitorData(jobExecutionContext);
-            hostMonitorData.addMonitorIDForHost(monitorID);
-
-            UserMonitorData userMonitorData = new UserMonitorData(monitorID.getUserName());
-            userMonitorData.addHostMonitorData(hostMonitorData);
-            try {
-                queue.put(userMonitorData);
-                logger.debugId(monitorID.getJobID(), "Added new job to the monitoring queue, experiment {}," +
-                        " task {}", monitorID.getExperimentID(), monitorID.getTaskID());
-            } catch (InterruptedException e) {
-                throw new AiravataMonitorException(e);
-            }
-        }
-    }
-
-    private static boolean isEqual(ComputeResourceDescription comRes_1, ComputeResourceDescription comRes_2) {
-        return comRes_1.getComputeResourceId().equals(comRes_2.getComputeResourceId()) &&
-                comRes_1.getHostName().equals(comRes_2.getHostName());
-    }
-
-    public static boolean isTheLastJobInQueue(BlockingQueue<MonitorID> queue,MonitorID monitorID){
-        Iterator<MonitorID> iterator = queue.iterator();
-        while(iterator.hasNext()){
-            MonitorID next = iterator.next();
-            if (monitorID.getUserName().equals(next.getUserName()) &&
-                    CommonUtils.isEqual(monitorID.getComputeResourceDescription(), next.getComputeResourceDescription())) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    /**
-     * This method doesn't have to be synchronized because it is invoked by HPCPullMonitor, which is already synchronized.
-     * @param userMonitorData the per-user monitoring data to remove the job from
-     * @param monitorID the job to remove
-     * @throws AiravataMonitorException
-     */
-    public static void removeMonitorFromQueue(UserMonitorData userMonitorData, MonitorID monitorID) throws AiravataMonitorException {
-                if (userMonitorData.getUserName().equals(monitorID.getUserName())) {
-                    // then this is the right place to update
-                    List<HostMonitorData> hostMonitorData = userMonitorData.getHostMonitorData();
-                    Iterator<HostMonitorData> iterator1 = hostMonitorData.iterator();
-                    while (iterator1.hasNext()) {
-                        HostMonitorData iHostMonitorID = iterator1.next();
-                        if (isEqual(iHostMonitorID.getComputeResourceDescription(), monitorID.getComputeResourceDescription())) {
-                            Iterator<MonitorID> iterator2 = iHostMonitorID.getMonitorIDs().iterator();
-                            while (iterator2.hasNext()) {
-                                MonitorID iMonitorID = iterator2.next();
-                                if (iMonitorID.getJobID().equals(monitorID.getJobID())
-                                        || iMonitorID.getJobName().equals(monitorID.getJobName())) {
-                                    // OK, we found the object; we cannot do list.remove(object) because the states of
-                                    // the two objects could differ, that's why we compare the jobID
-                                    iterator2.remove();
-                                    logger.infoId(monitorID.getJobID(), "Removed the jobId: {} JobName: {} from monitoring last " +
-                                            "status:{}", monitorID.getJobID(),monitorID.getJobName(), monitorID.getStatus().toString());
-
-                                    return;
-                                }
-                            }
-                        }
-                    }
-                }
-        logger.info("Cannot find the given MonitorID in the queue with userName " +
-                monitorID.getUserName() + "  and jobID " + monitorID.getJobID());
-        logger.info("This might not be an error because someone else removed this job from the queue");
-    }
-
-
-    public static void invokeOutFlowHandlers(JobExecutionContext jobExecutionContext) throws GFacException {
-        List<GFacHandlerConfig> handlers = jobExecutionContext.getGFacConfiguration().getOutHandlers();
-
-        for (GFacHandlerConfig handlerClassName : handlers) {
-            Class<? extends GFacHandler> handlerClass;
-            GFacHandler handler;
-            try {
-                handlerClass = Class.forName(handlerClassName.getClassName().trim()).asSubclass(GFacHandler.class);
-                handler = handlerClass.newInstance();
-                handler.initProperties(handlerClassName.getProperties());
-            } catch (ClassNotFoundException e) {
-                logger.error(e.getMessage());
-                throw new GFacException("Cannot load handler class " + handlerClassName, e);
-            } catch (InstantiationException e) {
-                logger.error(e.getMessage());
-                throw new GFacException("Cannot instantiate handler class " + handlerClassName, e);
-            } catch (IllegalAccessException e) {
-                logger.error(e.getMessage());
-                throw new GFacException("Cannot instantiate handler class " + handlerClassName, e);
-            }
-            try {
-                handler.invoke(jobExecutionContext);
-            } catch (Exception e) {
-                // TODO: Better error reporting.
-                throw new GFacException("Error Executing a OutFlow Handler", e);
-            }
-        }
-    }
-
-    /**
-     * Update the job count for a given set of paths.
-     * @param curatorClient - CuratorFramework instance
-     * @param changeCountMap - map of job-count changes keyed by the relevant path
-     * @param isAdd - whether to add the given count to, or subtract it from, the existing job count
-     */
-    public static void updateZkWithJobCount(CuratorFramework curatorClient, final Map<String, Integer> changeCountMap, boolean isAdd) {
-        StringBuilder changeZNodePaths = new StringBuilder();
-        try {
-            for (String path : changeCountMap.keySet()) {
-                if (isAdd) {
-                    CommonUtils.checkAndCreateZNode(curatorClient, path);
-                }
-                byte[] byteData = curatorClient.getData().forPath(path);
-                String nodeData;
-                if (byteData == null) {
-                    if (isAdd) {
-                        curatorClient.setData().withVersion(-1).forPath(path, String.valueOf(changeCountMap.get(path)).getBytes());
-                    } else {
-                        // This should not happen, but we handle it in case of a ZooKeeper data or communication failure
-                        logger.warn("Couldn't reduce job count in " + path + " as it returned null data. Resetting the job count to 0");
-                        curatorClient.setData().withVersion(-1).forPath(path, "0".getBytes());
-                    }
-                } else {
-                    nodeData = new String(byteData);
-                    if (isAdd) {
-                        curatorClient.setData().withVersion(-1).forPath(path,
-                                String.valueOf(changeCountMap.get(path) + Integer.parseInt(nodeData)).getBytes());
-                    } else {
-                        int previousCount = Integer.parseInt(nodeData);
-                        int removeCount = changeCountMap.get(path);
-                        if (previousCount >= removeCount) {
-                            curatorClient.setData().withVersion(-1).forPath(path,
-                                    String.valueOf(previousCount - removeCount).getBytes());
-                        } else {
-                            // This is not possible, do we need to reset the job count to 0 ?
-                            logger.error("Requested remove job count is " + removeCount +
-                                    " which is higher than the existing job count " + previousCount
-                                    + " in  " + path + " path.");
-                        }
-                    }
-                }
-                changeZNodePaths.append(path).append(":");
-            }
-
-            // update stat node to trigger orchestrator watchers
-            if (changeCountMap.size() > 0) {
-                changeZNodePaths.deleteCharAt(changeZNodePaths.length() - 1);
-                curatorClient.setData().withVersion(-1).forPath("/" + Constants.STAT, changeZNodePaths.toString().getBytes());
-            }
-        } catch (Exception e) {
-            logger.error("Error while writing job count to zookeeper", e);
-        }
-
-    }
-
-    /**
-     * Increase job count by one and update the zookeeper
-     * @param monitorID - Job monitorId
-     */
-    public static void increaseZkJobCount(MonitorID monitorID) {
-        Map<String, Integer> addMap = new HashMap<String, Integer>();
-        addMap.put(CommonUtils.getJobCountUpdatePath(monitorID), 1);
-        updateZkWithJobCount(monitorID.getJobExecutionContext().getCuratorClient(), addMap, true);
-    }
-
-    /**
-     * Construct and return the path for a given MonitorID, e.g. /stat/{username}/{resourceName}/job
-     * @param monitorID - Job monitorId
-     * @return
-     */
-    public static String getJobCountUpdatePath(MonitorID monitorID){
-        return new StringBuilder("/").append(Constants.STAT).append("/").append(monitorID.getUserName())
-                .append("/").append(monitorID.getComputeResourceDescription().getHostName()).append("/").append(Constants.JOB).toString();
-    }
-
-    /**
-     * Check whether a znode exists at the given path; if not, create a new znode.
-     * @param curatorClient - Curator/ZooKeeper client instance
-     * @param path - path of the znode to check
-     * @throws KeeperException
-     * @throws InterruptedException
-     */
-    private static void checkAndCreateZNode(CuratorFramework curatorClient , String path) throws Exception {
-        if (curatorClient.checkExists().forPath(path) == null) { // if znode doesn't exist
-            if (path.lastIndexOf("/") > 1) {  // recursively traverse to parent znode and check parent exist
-                checkAndCreateZNode(curatorClient, (path.substring(0, path.lastIndexOf("/"))));
-            }
-            curatorClient.create().withMode(CreateMode.PERSISTENT).withACL(ZooDefs.Ids.OPEN_ACL_UNSAFE).forPath(path);
-        }
-    }
-}
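
A minimal Curator sketch of the same znode bookkeeping, using creatingParentsIfNeeded() instead of the hand-rolled recursive checkAndCreateZNode. The connect string and the /stat/{user}/{host}/job path are placeholders following the format of getJobCountUpdatePath().

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.zookeeper.CreateMode;

public class JobCountZNodeSketch {
    public static void main(String[] args) throws Exception {
        CuratorFramework client = CuratorFrameworkFactory.newClient(
                "localhost:2181", new ExponentialBackoffRetry(1000, 3)); // placeholder connect string
        client.start();
        String path = "/stat/someUser/gw68.example.org/job";             // placeholder path
        if (client.checkExists().forPath(path) == null) {
            // Curator creates any missing parent znodes in one call
            client.create().creatingParentsIfNeeded()
                  .withMode(CreateMode.PERSISTENT).forPath(path, "0".getBytes());
        }
        byte[] data = client.getData().forPath(path);
        int count = Integer.parseInt(new String(data)) + 1;              // add one job, as increaseZkJobCount does
        client.setData().withVersion(-1).forPath(path, String.valueOf(count).getBytes());
        client.close();
    }
}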


[5/7] airavata git commit: Removed gsi related code

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHUtils.java
deleted file mode 100644
index 3271744..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/util/SSHUtils.java
+++ /dev/null
@@ -1,760 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.util;
-
-import com.jcraft.jsch.*;
-import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.gsi.ssh.config.ConfigReader;
-import org.apache.airavata.gfac.gsi.ssh.impl.StandardOutReader;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * This class is useful for SCPing a file to a remote grid machine using proxy credentials.
- */
-public class SSHUtils {
-    private static final org.slf4j.Logger log = LoggerFactory.getLogger(SSHUtils.class);
-
-    static {
-        JSch.setConfig("gssapi-with-mic.x509", "org.apache.airavata.gfac.ssh.GSSContextX509");
-        JSch.setConfig("userauth.gssapi-with-mic", "com.jcraft.jsch.UserAuthGSSAPIWithMICGSSCredentials");
-
-    }
-
-    private ServerInfo serverInfo;
-
-    private GSIAuthenticationInfo authenticationInfo;
-
-    private ConfigReader configReader;
-
-    /**
-     * We need to pass certificateLocation when we use SCPTo method standalone
-     *
-     * @param serverInfo
-     * @param authenticationInfo
-     * @param certificateLocation
-     * @param configReader
-     */
-    public SSHUtils(ServerInfo serverInfo, GSIAuthenticationInfo authenticationInfo, String certificateLocation, ConfigReader configReader) {
-        System.setProperty("X509_CERT_DIR", certificateLocation);
-        this.serverInfo = serverInfo;
-        this.authenticationInfo = authenticationInfo;
-        this.configReader = configReader;
-    }
-
-    /**
-     * This can be used when use SCPTo method within SSHAPi because SSHApiFactory already set the system property certificateLocation
-     *
-     * @param serverInfo
-     * @param authenticationInfo
-     * @param configReader
-     */
-    public SSHUtils(ServerInfo serverInfo, GSIAuthenticationInfo authenticationInfo
-            , ConfigReader configReader) {
-        this.serverInfo = serverInfo;
-        this.authenticationInfo = authenticationInfo;
-        this.configReader = configReader;
-    }
-
-    /**
-     * This  method will scp the lFile to the rFile location
-     *
-     * @param rFile remote file Path to use in scp
-     * @param lFile local file path to use in scp
-     * @throws IOException
-     * @throws JSchException
-     * @throws SSHApiException
-     */
-    public void scpTo(String rFile, String lFile) throws IOException, JSchException, SSHApiException {
-        FileInputStream fis = null;
-        String prefix = null;
-        if (new File(lFile).isDirectory()) {
-            prefix = lFile + File.separator;
-        }
-        JSch jsch = new JSch();
-
-        log.debug("Connecting to server - " + serverInfo.getHost() + ":" + serverInfo.getPort() + " with user name - "
-                + serverInfo.getUserName());
-
-        Session session = null;
-
-        try {
-            session = jsch.getSession(serverInfo.getUserName(), serverInfo.getHost(), serverInfo.getPort());
-        } catch (JSchException e) {
-            throw new SSHApiException("An exception occurred while creating SSH session." +
-                    "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-        java.util.Properties config = this.configReader.getProperties();
-        session.setConfig(config);
-
-        // Not a good approach, but we don't have another choice
-        if (session instanceof ExtendedSession) {
-            ((ExtendedSession) session).setAuthenticationInfo(authenticationInfo);
-        }
-
-        try {
-            session.connect();
-        } catch (JSchException e) {
-            throw new SSHApiException("An exception occurred while connecting to server." +
-                    "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-        boolean ptimestamp = true;
-
-        // exec 'scp -t rfile' remotely
-        String command = "scp " + (ptimestamp ? "-p" : "") + " -t " + rFile;
-        Channel channel = session.openChannel("exec");
-
-        StandardOutReader stdOutReader = new StandardOutReader();
-        ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-        ((ChannelExec) channel).setCommand(command);
-
-        // get I/O streams for remote scp
-        OutputStream out = channel.getOutputStream();
-        InputStream in = channel.getInputStream();
-
-        channel.connect();
-
-        if (checkAck(in) != 0) {
-            String error = "Error Reading input Stream";
-            log.error(error);
-            throw new SSHApiException(error);
-        }
-
-        File _lfile = new File(lFile);
-
-        if (ptimestamp) {
-            command = "T" + (_lfile.lastModified() / 1000) + " 0";
-            // The access time should be sent here,
-            // but it is not accessible via the Java API ;-<
-            command += (" " + (_lfile.lastModified() / 1000) + " 0\n");
-            out.write(command.getBytes());
-            out.flush();
-            if (checkAck(in) != 0) {
-                String error = "Error Reading input Stream";
-                log.error(error);
-                throw new SSHApiException(error);
-            }
-        }
-
-        // send "C0644 filesize filename", where filename should not include '/'
-        long filesize = _lfile.length();
-        command = "C0644 " + filesize + " ";
-        if (lFile.lastIndexOf('/') > 0) {
-            command += lFile.substring(lFile.lastIndexOf('/') + 1);
-        } else {
-            command += lFile;
-        }
-        command += "\n";
-        out.write(command.getBytes());
-        out.flush();
-        if (checkAck(in) != 0) {
-            String error = "Error Reading input Stream";
-            log.error(error);
-            throw new SSHApiException(error);
-        }
-
-        // send the content of lFile
-        fis = new FileInputStream(lFile);
-        byte[] buf = new byte[1024];
-        while (true) {
-            int len = fis.read(buf, 0, buf.length);
-            if (len <= 0) break;
-            out.write(buf, 0, len); //out.flush();
-        }
-        fis.close();
-        fis = null;
-        // send '\0'
-        buf[0] = 0;
-        out.write(buf, 0, 1);
-        out.flush();
-        if (checkAck(in) != 0) {
-            String error = "Error Reading input Stream";
-            log.error(error);
-            throw new SSHApiException(error);
-        }
-        out.close();
-
-        stdOutReader.onOutput(channel);
-
-
-        if (stdOutReader.getStdErrorString().contains("scp:")) {
-            throw new SSHApiException(stdOutReader.getStdErrorString());
-        }
-        channel.disconnect();
-    }
-
-    /**
-     * This will copy a local file to a remote location
-     *
-     * @param remoteFile remote location to transfer the file to; this cannot be a directory. If the user passes
-     *                   a directory we do copy into it, but we simply return the directory name
-     *                   todo handle the directory name as input and return the proper final output file name
-     * @param localFile  Local file to transfer, this can be a directory
-     * @param session
-     * @return returns the final remote file path, so that users can use the new file location
-     * @throws IOException
-     * @throws JSchException
-     * @throws SSHApiException
-     */
-    public static String scpTo(String remoteFile, String localFile, Session session) throws IOException, JSchException, SSHApiException {
-        FileInputStream fis = null;
-        String prefix = null;
-        if (new File(localFile).isDirectory()) {
-            prefix = localFile + File.separator;
-        }
-        boolean ptimestamp = true;
-
-        // exec 'scp -t rfile' remotely
-        String command = "scp " + (ptimestamp ? "-p" : "") + " -t " + remoteFile;
-        Channel channel = session.openChannel("exec");
-
-        StandardOutReader stdOutReader = new StandardOutReader();
-        ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-        ((ChannelExec) channel).setCommand(command);
-
-        // get I/O streams for remote scp
-        OutputStream out = channel.getOutputStream();
-        InputStream in = channel.getInputStream();
-
-        channel.connect();
-
-        if (checkAck(in) != 0) {
-            String error = "Error Reading input Stream";
-            log.error(error);
-            throw new SSHApiException(error);
-        }
-
-        File _lfile = new File(localFile);
-
-        if (ptimestamp) {
-            command = "T" + (_lfile.lastModified() / 1000) + " 0";
-            // The access time should be sent here,
-            // but it is not accessible via the Java API ;-<
-            command += (" " + (_lfile.lastModified() / 1000) + " 0\n");
-            out.write(command.getBytes());
-            out.flush();
-            if (checkAck(in) != 0) {
-                String error = "Error Reading input Stream";
-                log.error(error);
-                throw new SSHApiException(error);
-            }
-        }
-
-        // send "C0644 filesize filename", where filename should not include '/'
-        long filesize = _lfile.length();
-        command = "C0644 " + filesize + " ";
-        if (localFile.lastIndexOf('/') > 0) {
-            command += localFile.substring(localFile.lastIndexOf('/') + 1);
-        } else {
-            command += localFile;
-        }
-        command += "\n";
-        out.write(command.getBytes());
-        out.flush();
-        if (checkAck(in) != 0) {
-            String error = "Error Reading input Stream";
-            log.error(error);
-            throw new SSHApiException(error);
-        }
-
-        // send the content of lFile
-        fis = new FileInputStream(localFile);
-        byte[] buf = new byte[1024];
-        while (true) {
-            int len = fis.read(buf, 0, buf.length);
-            if (len <= 0) break;
-            out.write(buf, 0, len); //out.flush();
-        }
-        fis.close();
-        fis = null;
-        // send '\0'
-        buf[0] = 0;
-        out.write(buf, 0, 1);
-        out.flush();
-        if (checkAck(in) != 0) {
-            String error = "Error Reading input Stream";
-            log.error(error);
-            throw new SSHApiException(error);
-        }
-        out.close();
-        stdOutReader.onOutput(channel);
-
-
-        channel.disconnect();
-        if (stdOutReader.getStdErrorString().contains("scp:")) {
-            throw new SSHApiException(stdOutReader.getStdErrorString());
-        }
-        // since the remote file is always a file, we just return it
-        return remoteFile;
-    }
-
-    /**
-     * This method will copy a remote file to a local directory
-     *
-     * @param remoteFile remote file path; this has to be a fully qualified path
-     * @param localFile  the local destination to copy to; this can be a directory too
-     * @param session
-     */
-    public static void scpFrom(String remoteFile, String localFile, Session session) throws IOException, JSchException, SSHApiException {
-        FileOutputStream fos = null;
-        try {
-            String prefix = null;
-            if (new File(localFile).isDirectory()) {
-                prefix = localFile + File.separator;
-            }
-
-            // exec 'scp -f remotefile' remotely
-            String command = "scp -f " + remoteFile;
-            Channel channel = session.openChannel("exec");
-            ((ChannelExec) channel).setCommand(command);
-
-            StandardOutReader stdOutReader = new StandardOutReader();
-            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-            // get I/O streams for remote scp
-            OutputStream out = channel.getOutputStream();
-            InputStream in = channel.getInputStream();
-
-            channel.connect();
-
-            byte[] buf = new byte[1024];
-
-            // send '\0'
-            buf[0] = 0;
-            out.write(buf, 0, 1);
-            out.flush();
-
-            while (true) {
-                int c = checkAck(in);
-                if (c != 'C') {
-                    break;
-                }
-
-                // read '0644 '
-                in.read(buf, 0, 5);
-
-                long filesize = 0L;
-                while (true) {
-                    if (in.read(buf, 0, 1) < 0) {
-                        // error
-                        break;
-                    }
-                    if (buf[0] == ' ') break;
-                    filesize = filesize * 10L + (long) (buf[0] - '0');
-                }
-
-                String file = null;
-                for (int i = 0; ; i++) {
-                    in.read(buf, i, 1);
-                    if (buf[i] == (byte) 0x0a) {
-                        file = new String(buf, 0, i);
-                        break;
-                    }
-                }
-
-                //System.out.println("filesize="+filesize+", file="+file);
-
-                // send '\0'
-                buf[0] = 0;
-                out.write(buf, 0, 1);
-                out.flush();
-
-                // read the content of lfile
-                fos = new FileOutputStream(prefix == null ? localFile : prefix + file);
-                int foo;
-                while (true) {
-                    if (buf.length < filesize) foo = buf.length;
-                    else foo = (int) filesize;
-                    foo = in.read(buf, 0, foo);
-                    if (foo < 0) {
-                        // error
-                        break;
-                    }
-                    fos.write(buf, 0, foo);
-                    filesize -= foo;
-                    if (filesize == 0L) break;
-                }
-                fos.close();
-                fos = null;
-
-                if (checkAck(in) != 0) {
-                    String error = "Error transfering the file content";
-                    log.error(error);
-                    throw new SSHApiException(error);
-                }
-
-                // send '\0'
-                buf[0] = 0;
-                out.write(buf, 0, 1);
-                out.flush();
-            }
-            stdOutReader.onOutput(channel);
-            if (stdOutReader.getStdErrorString().contains("scp:")) {
-                throw new SSHApiException(stdOutReader.getStdErrorString());
-            }
-
-        } catch (Exception e) {
-            log.error(e.getMessage(), e);
-        } finally {
-            try {
-                if (fos != null) fos.close();
-            } catch (Exception ee) {
-            }
-        }
-    }
-
-    /**
-     * This method will copy a remote file to a local directory
-     *
-     * @param remoteFile remote file path; this has to be a fully qualified path
-     * @param localFile  the local destination to copy to; this can be a directory too
-     */
-    public void scpFrom(String remoteFile, String localFile) throws SSHApiException {
-        JSch jsch = new JSch();
-
-        log.debug("Connecting to server - " + serverInfo.getHost() + ":" + serverInfo.getPort() + " with user name - "
-                + serverInfo.getUserName());
-
-        Session session = null;
-
-        try {
-            session = jsch.getSession(serverInfo.getUserName(), serverInfo.getHost(), serverInfo.getPort());
-        } catch (JSchException e) {
-            throw new SSHApiException("An exception occurred while creating SSH session." +
-                    "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-        java.util.Properties config = this.configReader.getProperties();
-        session.setConfig(config);
-
-        // Not a good way, but we don't have any choice
-        if (session instanceof ExtendedSession) {
-            ((ExtendedSession) session).setAuthenticationInfo(authenticationInfo);
-        }
-
-        try {
-            session.connect();
-        } catch (JSchException e) {
-            throw new SSHApiException("An exception occurred while connecting to server." +
-                    "Connecting server - " + serverInfo.getHost() + ":" + serverInfo.getPort() +
-                    " connecting user name - "
-                    + serverInfo.getUserName(), e);
-        }
-
-        FileOutputStream fos = null;
-        try {
-            String prefix = null;
-            if (new File(localFile).isDirectory()) {
-                prefix = localFile + File.separator;
-            }
-
-            // exec 'scp -f remotefile' remotely
-            StandardOutReader stdOutReader = new StandardOutReader();
-            String command = "scp -f " + remoteFile;
-            Channel channel = session.openChannel("exec");
-            ((ChannelExec) channel).setCommand(command);
-            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-            // get I/O streams for remote scp
-            OutputStream out = channel.getOutputStream();
-            InputStream in = channel.getInputStream();
-
-            channel.connect();
-
-            byte[] buf = new byte[1024];
-
-            // send '\0'
-            buf[0] = 0;
-            out.write(buf, 0, 1);
-            out.flush();
-
-            while (true) {
-                int c = checkAck(in);
-                if (c != 'C') {
-                    break;
-                }
-
-                // read '0644 '
-                in.read(buf, 0, 5);
-
-                long filesize = 0L;
-                while (true) {
-                    if (in.read(buf, 0, 1) < 0) {
-                        // error
-                        break;
-                    }
-                    if (buf[0] == ' ') break;
-                    filesize = filesize * 10L + (long) (buf[0] - '0');
-                }
-
-                String file = null;
-                for (int i = 0; ; i++) {
-                    in.read(buf, i, 1);
-                    if (buf[i] == (byte) 0x0a) {
-                        file = new String(buf, 0, i);
-                        break;
-                    }
-                }
-
-                //System.out.println("filesize="+filesize+", file="+file);
-
-                // send '\0'
-                buf[0] = 0;
-                out.write(buf, 0, 1);
-                out.flush();
-
-                // read the content of lfile
-                fos = new FileOutputStream(prefix == null ? localFile : prefix + file);
-                int foo;
-                while (true) {
-                    if (buf.length < filesize) foo = buf.length;
-                    else foo = (int) filesize;
-                    foo = in.read(buf, 0, foo);
-                    if (foo < 0) {
-                        // error
-                        break;
-                    }
-                    fos.write(buf, 0, foo);
-                    filesize -= foo;
-                    if (filesize == 0L) break;
-                }
-                fos.close();
-                fos = null;
-
-                if (checkAck(in) != 0) {
-                    String error = "Error transfering the file content";
-                    log.error(error);
-                    throw new SSHApiException(error);
-                }
-
-                // send '\0'
-                buf[0] = 0;
-                out.write(buf, 0, 1);
-                out.flush();
-            }
-
-//            session.disconnect();
-
-            stdOutReader.onOutput(channel);
-            if (stdOutReader.getStdErrorString().contains("scp:")) {
-                throw new SSHApiException(stdOutReader.getStdErrorString());
-            }
-        } catch (Exception e) {
-            log.error(e.getMessage(), e);
-        } finally {
-            try {
-                if (fos != null) fos.close();
-            } catch (Exception ee) {
-            }
-        }
-    }
-
-    /**
-     * This method will copy a file from one remote location to another remote location (third-party transfer)
-     *
-     * @param remoteFileSource fully qualified path of the source file on the remote resource
-     * @param remoteFileTarget fully qualified path of the target location on the remote resource
-     * @param session
-     */
-    public static void scpThirdParty(String remoteFileSource, String remoteFileTarget, Session session) throws IOException, JSchException, SSHApiException {
-        FileOutputStream fos = null;
-        try {
-            String prefix = null;
-
-            // exec 'scp -3 source target' remotely (third-party copy)
-            String command = "scp -3 " + remoteFileSource + " " + remoteFileTarget;
-            Channel channel = session.openChannel("exec");
-            ((ChannelExec) channel).setCommand(command);
-
-            StandardOutReader stdOutReader = new StandardOutReader();
-            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-            // get I/O streams for remote scp
-            OutputStream out = channel.getOutputStream();
-            InputStream in = channel.getInputStream();
-
-            channel.connect();
-
-            byte[] buf = new byte[1024];
-
-            // send '\0'
-            buf[0] = 0;
-            out.write(buf, 0, 1);
-            out.flush();
-
-            while (true) {
-                int c = checkAck(in);
-                if (c != 'C') {
-                    break;
-                }
-
-                // read '0644 '
-                in.read(buf, 0, 5);
-
-                long filesize = 0L;
-                while (true) {
-                    if (in.read(buf, 0, 1) < 0) {
-                        // error
-                        break;
-                    }
-                    if (buf[0] == ' ') break;
-                    filesize = filesize * 10L + (long) (buf[0] - '0');
-                }
-                int foo;
-                while (true) {
-                    if (buf.length < filesize) foo = buf.length;
-                    else foo = (int) filesize;
-
-                    int len = in.read(buf, 0, foo);
-                    if (len <= 0) break;
-                    out.write(buf, 0, len);
-                }
-                // send '\0'
-                buf[0] = 0;
-                out.write(buf, 0, 1);
-                out.flush();
-                if (checkAck(in) != 0) {
-                    String error = "Error transfering the file content";
-                    log.error(error);
-                    throw new SSHApiException(error);
-                }
-
-            }
-            out.close();
-
-            stdOutReader.onOutput(channel);
-            if (stdOutReader.getStdErrorString().contains("scp:")) {
-                throw new SSHApiException(stdOutReader.getStdErrorString());
-            }
-
-        } catch (Exception e) {
-            log.error(e.getMessage(), e);
-        } finally {
-            try {
-                if (fos != null) fos.close();
-            } catch (Exception ee) {
-            }
-        }
-    }
-
-    public static void makeDirectory(String path, Session session) throws IOException, JSchException, SSHApiException {
-
-        // exec 'mkdir -p path' remotely
-        String command = "mkdir -p " + path;
-        Channel channel = session.openChannel("exec");
-        StandardOutReader stdOutReader = new StandardOutReader();
-
-        ((ChannelExec) channel).setCommand(command);
-
-
-        ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-        try {
-            channel.connect();
-        } catch (JSchException e) {
-
-            channel.disconnect();
-//            session.disconnect();
-
-            throw new SSHApiException("Unable to retrieve command output. Command - " + command +
-                    " on server - " + session.getHost() + ":" + session.getPort() +
-                    " connecting user name - "
-                    + session.getUserName(), e);
-        }
-        stdOutReader.onOutput(channel);
-        if (stdOutReader.getStdErrorString().contains("mkdir:")) {
-            throw new SSHApiException(stdOutReader.getStdErrorString());
-        }
-
-        channel.disconnect();
-    }
-
-    public static List<String> listDirectory(String path, Session session) throws IOException, JSchException, SSHApiException {
-
-        // exec 'ls path' remotely
-        String command = "ls " + path;
-        Channel channel = session.openChannel("exec");
-        StandardOutReader stdOutReader = new StandardOutReader();
-
-        ((ChannelExec) channel).setCommand(command);
-
-
-        ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-        try {
-            channel.connect();
-        } catch (JSchException e) {
-
-            channel.disconnect();
-//            session.disconnect();
-
-            throw new SSHApiException("Unable to retrieve command output. Command - " + command +
-                    " on server - " + session.getHost() + ":" + session.getPort() +
-                    " connecting user name - "
-                    + session.getUserName(), e);
-        }
-        stdOutReader.onOutput(channel);
-        stdOutReader.getStdOutputString();
-        if (stdOutReader.getStdErrorString().contains("ls:")) {
-            throw new SSHApiException(stdOutReader.getStdErrorString());
-        }
-        channel.disconnect();
-        return Arrays.asList(stdOutReader.getStdOutputString().split("\n"));
-    }
-
-    static int checkAck(InputStream in) throws IOException {
-        int b = in.read();
-        if (b == 0) return b;
-        if (b == -1) return b;
-
-        if (b == 1 || b == 2) {
-            StringBuffer sb = new StringBuffer();
-            int c;
-            do {
-                c = in.read();
-                sb.append((char) c);
-            }
-            while (c != '\n');
-            if (b == 1) { // error
-                System.out.print(sb.toString());
-            }
-            if (b == 2) { // fatal error
-                System.out.print(sb.toString());
-            }
-        }
-        return b;
-    }
-}
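
For reference, the scpTo implementations removed above drive the raw SCP protocol over a JSch
"exec" channel: run "scp -t <remote>" on the far side, send a "C0644 <size> <name>" header,
stream the file bytes, then send a single zero byte, checking the acknowledgement byte after
each step. A minimal standalone sketch of that exchange follows; the host, user, password and
paths are placeholders, and password authentication stands in for the GSI authentication the
removed code used.

    import com.jcraft.jsch.ChannelExec;
    import com.jcraft.jsch.JSch;
    import com.jcraft.jsch.Session;

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.io.OutputStream;

    public class ScpToSketch {
        public static void main(String[] args) throws Exception {
            JSch jsch = new JSch();
            Session session = jsch.getSession("user", "host.example.org", 22); // placeholder host/user
            session.setPassword("secret");                                     // removed code used GSI auth instead
            session.setConfig("StrictHostKeyChecking", "no");
            session.connect();

            File local = new File("/tmp/input.dat");                           // placeholder local file
            ChannelExec channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand("scp -t /remote/dir/input.dat");                // placeholder remote path
            OutputStream out = channel.getOutputStream();
            InputStream in = channel.getInputStream();
            channel.connect();
            expectAck(in);

            // SCP header: mode, size and file name, terminated by a newline
            out.write(("C0644 " + local.length() + " " + local.getName() + "\n").getBytes());
            out.flush();
            expectAck(in);

            // file content followed by a single zero byte
            FileInputStream fis = new FileInputStream(local);
            byte[] buf = new byte[1024];
            int len;
            while ((len = fis.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
            fis.close();
            out.write(0);
            out.flush();
            expectAck(in);

            out.close();
            channel.disconnect();
            session.disconnect();
        }

        // acknowledgement convention used by checkAck above: 0 = ok, 1 = error, 2 = fatal error
        private static void expectAck(InputStream in) throws Exception {
            int b = in.read();
            if (b != 0) {
                throw new IllegalStateException("scp acknowledgement failed, code " + b);
            }
        }
    }
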

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
deleted file mode 100644
index 5694a64..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsissh.handler;
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
-import org.apache.airavata.model.experiment.*;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.Properties;
-
-public class GSISSHDirectorySetupHandler extends AbstractHandler {
-      private static final Logger log = LoggerFactory.getLogger(GSISSHDirectorySetupHandler.class);
-
-	public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        try {
-            String hostAddress = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
-            }
-        } catch (Exception e) {
-        	 try {
-                 StringWriter errors = new StringWriter();
-                 e.printStackTrace(new PrintWriter(errors));
-  				GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-  			} catch (GFacException e1) {
-  				 log.error(e1.getLocalizedMessage());
-  			}
-        	throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-        }
-
-        log.info("Setup SSH job directorties");
-        super.invoke(jobExecutionContext);
-        makeDirectory(jobExecutionContext);
-	}
-	private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        RemoteCluster remoteCluster = null;
-        try {
-            String hostAddress = jobExecutionContext.getHostName();
-            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-            if (remoteCluster == null) {
-                try {
-                    GFacUtils.saveErrorDetails(jobExecutionContext, "Security context is not set properly", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                } catch (GFacException e1) {
-                    log.error(e1.getLocalizedMessage());
-                }
-                throw new GFacHandlerException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-
-            String workingDirectory = jobExecutionContext.getWorkingDir();
-            remoteCluster.makeDirectory(workingDirectory);
-            if(!jobExecutionContext.getInputDir().equals(workingDirectory))
-                remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
-            if(!jobExecutionContext.getOutputDir().equals(workingDirectory))
-            	remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
-            
-            DataTransferDetails detail = new DataTransferDetails();
-            TransferStatus status = new TransferStatus();
-            status.setTransferState(TransferState.DIRECTORY_SETUP);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("Working directory = " + workingDirectory);
-
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-		} catch (Exception e) {
-			DataTransferDetails detail = new DataTransferDetails();
-			TransferStatus status = new TransferStatus();
-			detail.setTransferDescription("Working directory = " + jobExecutionContext.getWorkingDir());
-			status.setTransferState(TransferState.FAILED);
-			detail.setTransferStatus(status);
-			try {
-				experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-				GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-			} catch (Exception e1) {
-				throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-			}
-			throw new GFacHandlerException("Error executing the Handler: " + GSISSHDirectorySetupHandler.class, e);
-		}
-	}
-
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-         this.invoke(jobExecutionContext);
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
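
The directory-setup handler removed above ultimately runs "mkdir -p <path>" over an SSH exec
channel (see the deleted SSHUtils.makeDirectory) and treats any stderr output containing
"mkdir:" as a failure. A minimal sketch of that step, with placeholder connection details and
path:

    import com.jcraft.jsch.ChannelExec;
    import com.jcraft.jsch.JSch;
    import com.jcraft.jsch.Session;

    import java.io.ByteArrayOutputStream;

    public class MakeRemoteDirectorySketch {
        public static void main(String[] args) throws Exception {
            JSch jsch = new JSch();
            Session session = jsch.getSession("user", "host.example.org", 22); // placeholders
            session.setPassword("secret");                                      // removed code used GSI auth instead
            session.setConfig("StrictHostKeyChecking", "no");
            session.connect();

            ChannelExec channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand("mkdir -p /scratch/experiment-123/input");       // placeholder path
            ByteArrayOutputStream err = new ByteArrayOutputStream();
            channel.setErrStream(err);
            channel.connect();
            while (!channel.isClosed()) {
                Thread.sleep(100);                                               // wait for the command to finish
            }
            channel.disconnect();
            session.disconnect();

            if (err.toString().contains("mkdir:")) {                             // same heuristic as the removed code
                throw new IllegalStateException("mkdir failed: " + err.toString());
            }
        }
    }
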

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
deleted file mode 100644
index f73a04f..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsissh.handler;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.DataTransferDetails;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.model.experiment.TransferState;
-import org.apache.airavata.model.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-
-/**
- * Recoverability for this handler assumes that the same input values arrive on the second
- * run, and that nobody changes the registry between the original submission and the re-submission
- */
-public class GSISSHInputHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(GSISSHInputHandler.class);
-
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        super.invoke(jobExecutionContext);
-        int index = 0;
-        int oldIndex = 0;
-        List<String> oldFiles = new ArrayList<String>();
-        MessageContext inputNew = new MessageContext();
-        DataTransferDetails detail = new DataTransferDetails();
-        TransferStatus status = new TransferStatus();
-        StringBuffer data = new StringBuffer("|");
-        RemoteCluster remoteCluster = null;
-
-        try {
-            String hostAddress = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
-            }
-
-            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-            if (remoteCluster == null) {
-                throw new GFacException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-
-            String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
-            if (pluginData != null) {
-                try {
-                    oldIndex = Integer.parseInt(pluginData.split("\\|")[0].trim());
-                    oldFiles = Arrays.asList(pluginData.split("\\|")[1].split(","));
-                    if (oldIndex == oldFiles.size()) {
-                        log.info("Old data looks good !!!!");
-                    } else {
-                        oldIndex = 0;
-                        oldFiles.clear();
-                    }
-                } catch (NumberFormatException e) {
-                    log.error("Previously stored data " + pluginData + " is wrong so we continue the operations");
-                }
-            }
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                try {
-                    GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
-                } catch (ApplicationSettingsException e) {
-                    log.error(e.getMessage());
-                    try {
-                        GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                    } catch (GFacException e1) {
-                        log.error(e1.getLocalizedMessage());
-                    }
-                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-                }
-            }
-            log.info("Invoking SCPInputHandler");
-
-            MessageContext input = jobExecutionContext.getInMessageContext();
-            Set<String> parameters = input.getParameters().keySet();
-            for (String paramName : parameters) {
-                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
-                String paramValue = inputParamType.getValue();
-                //TODO: Review this with type
-                if (inputParamType.getType() == DataType.URI) {
-                    if (index < oldIndex) {
-                        log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-                        inputParamType.setValue(oldFiles.get(index));
-                        data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
-                    } else {
-                        String stageInputFile = stageInputFiles(remoteCluster, jobExecutionContext, paramValue);
-                        inputParamType.setValue(stageInputFile);
-                        StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
-                        status.setTransferState(TransferState.UPLOAD);
-                        detail.setTransferStatus(status);
-                        detail.setTransferDescription("Input Data Staged: " + stageInputFile);
-                        experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-                        GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                    }
-                } // FIXME: what is the thrift model DataType equivalent for URIArray type?
-//                else if ("URIArray".equals(inputParamType.getType().getType().toString())) {
-//                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
-//                    List<String> newFiles = new ArrayList<String>();
-//                    for (String paramValueEach : split) {
-//                        if (index < oldIndex) {
-//                            log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-//                            newFiles.add(oldFiles.get(index));
-//                            data.append(oldFiles.get(index++)).append(",");
-//                        } else {
-//                            String stageInputFiles = stageInputFiles(remoteCluster, jobExecutionContext, paramValueEach);
-//                            status.setTransferState(TransferState.UPLOAD);
-//                            detail.setTransferStatus(status);
-//                            detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
-//                            registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-//                            StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
-//                            GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-//                            newFiles.add(stageInputFiles);
-//                        }
-//
-//                    }
-//                    ((URIArrayType) inputParamType.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
-//                }
-                inputNew.getParameters().put(paramName, inputParamType);
-            }
-        } catch (Exception e) {
-            log.error(e.getMessage());
-            status.setTransferState(TransferState.FAILED);
-            detail.setTransferDescription(e.getLocalizedMessage());
-            detail.setTransferStatus(status);
-            try {
-                GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-            } catch (Exception e1) {
-                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-        }
-        jobExecutionContext.setInMessageContext(inputNew);
-    }
-
-    private static String stageInputFiles(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
-        int i = paramValue.lastIndexOf(File.separator);
-        String substring = paramValue.substring(i + 1);
-        try {
-            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
-            if (paramValue.startsWith("file")) {
-                paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-            }
-            boolean success = false;
-            int j = 1;
-            while(!success){
-            try {
-				remoteCluster.scpTo(targetFile, paramValue);
-				success = true;
-			} catch (Exception e) {
-				log.info(e.getLocalizedMessage());
-				Thread.sleep(2000);
-				 if(j==3) {
-					throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-				 }
-            }
-            j++;
-            }
-            return targetFile;
-        } catch (Exception e) {
-            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-        }
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        this.invoke(jobExecutionContext);
-    }
-}
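
The removed input handler (and the output handler below) records its progress as a single
string of the form "<count>|<file1>,<file2>,..." via GFacUtils.saveHandlerData, and on a re-run
skips any file whose index is below the recorded count. A small self-contained sketch of that
bookkeeping, using made-up file names and no Airavata APIs:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class HandlerRecoverySketch {
        public static void main(String[] args) {
            // previously saved handler data: two files already transferred
            String pluginData = "2|/tmp/exp-1/input1.dat,/tmp/exp-1/input2.dat,";

            int oldIndex = 0;
            List<String> oldFiles = new ArrayList<String>();
            if (pluginData != null) {
                String[] parts = pluginData.split("\\|");
                oldIndex = Integer.parseInt(parts[0].trim());
                oldFiles = Arrays.asList(parts[1].split(","));
                if (oldIndex != oldFiles.size()) {
                    // inconsistent record: start from scratch, as the removed handlers do
                    oldIndex = 0;
                    oldFiles = new ArrayList<String>();
                }
            }

            // when staging file number 'index', reuse the old result if it is below oldIndex
            String[] inputs = {"input1.dat", "input2.dat", "input3.dat"};      // made-up inputs
            int index = 0;
            for (String input : inputs) {
                if (index < oldIndex) {
                    System.out.println(input + " already staged as " + oldFiles.get(index));
                } else {
                    System.out.println(input + " would be transferred now");
                }
                index++;
            }
        }
    }
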

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
deleted file mode 100644
index a28d290..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
+++ /dev/null
@@ -1,323 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsissh.handler;
-
-//import org.apache.airavata.commons.gfac.type.ActualParameter;
-//import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.impl.OutputUtils;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.DataTransferDetails;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.apache.airavata.model.experiment.TransferState;
-import org.apache.airavata.model.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-public class GSISSHOutputHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(GSISSHOutputHandler.class);
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        super.invoke(jobExecutionContext);
-        int index = 0;
-        int oldIndex = 0;
-        List<String> oldFiles = new ArrayList<String>();
-        StringBuffer data = new StringBuffer("|");
-        String hostAddress = jobExecutionContext.getHostName();
-        try {
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
-            }
-        }  catch (Exception e) {
-        	 try {
-  				GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-  			} catch (GFacException e1) {
-  				 log.error(e1.getLocalizedMessage());
-  			}  
-            log.error(e.getMessage());
-            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-        }
-        DataTransferDetails detail = new DataTransferDetails();
-        TransferStatus status = new TransferStatus();
-
-        RemoteCluster remoteCluster = null;
-        
-        try {
-            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-            if (remoteCluster == null) {
-                GFacUtils.saveErrorDetails(jobExecutionContext, "Security context is not set properly", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-                
-                throw new GFacProviderException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-
-            // Get the Stdouts and StdErrs
-            String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
-            if (pluginData != null) {
-                try {
-                    oldIndex = Integer.parseInt(pluginData.split("\\|")[0].trim());
-                    oldFiles = Arrays.asList(pluginData.split("\\|")[1].split(","));
-                    if (oldIndex == oldFiles.size()) {
-                        log.info("Old data looks good !!!!");
-                    } else {
-                        oldIndex = 0;
-                        oldFiles.clear();
-                    }
-                } catch (NumberFormatException e) {
-                    log.error("Previously stored data " + pluginData + " is wrong so we continue the operations");
-                }
-            }
-
-            String timeStampedExperimentID = GFacUtils.createUniqueNameWithDate(jobExecutionContext.getExperimentID());
-
-            TaskDetails taskData = jobExecutionContext.getTaskData();
-            String outputDataDir = null;
-            File localStdOutFile;
-            File localStdErrFile;
-            //FIXME: AdvancedOutput is a remote location; third-party transfer must work for this to work
-//            if (taskData.getAdvancedOutputDataHandling() != null) {
-//                outputDataDir = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
-//            }
-            if (outputDataDir == null) {
-                outputDataDir = File.separator + "tmp";
-            }
-            outputDataDir = outputDataDir + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID();
-            (new File(outputDataDir)).mkdirs();
-         	
-            String stdOutStr = "";
-            if (index < oldIndex) {
-                localStdOutFile = new File(oldFiles.get(index));
-                data.append(oldFiles.get(index++)).append(",");
-            } else {
-            	int i = 0;
-                localStdOutFile = new File(outputDataDir + File.separator + jobExecutionContext.getApplicationName() + ".stdout");
-                while(stdOutStr.isEmpty()){
-                try {
-                	remoteCluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
-                	stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
-				} catch (Exception e) {
-					log.error(e.getLocalizedMessage());
-					  Thread.sleep(2000);
-		        }
-                i++;
-                if(i==3)break;
-                }
-                
-                StringBuffer temp = new StringBuffer(data.append(localStdOutFile.getAbsolutePath()).append(",").toString());
-                GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-            }
-            if (index < oldIndex) {
-                localStdErrFile = new File(oldFiles.get(index));
-                data.append(oldFiles.get(index++)).append(",");
-            } else {
-                localStdErrFile = new File(outputDataDir + File.separator + jobExecutionContext.getApplicationName() + ".stderr");
-                remoteCluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
-                StringBuffer temp = new StringBuffer(data.append(localStdErrFile.getAbsolutePath()).append(",").toString());
-                GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-            }
-
-            String stdErrStr = GFacUtils.readFileToString(localStdErrFile.getAbsolutePath());
-            status.setTransferState(TransferState.STDOUT_DOWNLOAD);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("STDOUT:" + localStdOutFile.getAbsolutePath());
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-            status.setTransferState(TransferState.STDERROR_DOWNLOAD);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription("STDERR:" + localStdErrFile.getAbsolutePath());
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-
-            //todo this is a mess we have to fix this
-            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-            Set<String> keys = output.keySet();
-            for (String paramName : keys) {
-                OutputDataObjectType outputDataObjectType = (OutputDataObjectType) output.get(paramName);
-                if (DataType.URI == outputDataObjectType.getType()) {
-
-                    List<String> outputList = null;
-                    int retry=3;
-                    while(retry>0){
-                    	 outputList = remoteCluster.listDirectory(jobExecutionContext.getOutputDir());
-                        if (outputList.size() == 1 && outputList.get(0).isEmpty()) {
-                            Thread.sleep(10000);
-                        } else if (outputList.size() > 0) {
-                            break;
-                        }else{
-                            Thread.sleep(10000);
-                        }
-                        retry--;
-                        if(retry==0){
-                        }
-                    	 Thread.sleep(10000);
-                    }
-                    if (outputList.size() == 0 || outputList.get(0).isEmpty() || outputList.size() > 1) {
-                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
-                        Set<String> strings = output.keySet();
-                        outputArray.clear();
-                        for (String key : strings) {
-                            OutputDataObjectType outputDataObjectType1 = (OutputDataObjectType) output.get(key);
-                            if (DataType.URI == outputDataObjectType1.getType()) {
-                                String downloadFile = outputDataObjectType1.getValue();
-                                String localFile;
-                                if (index < oldIndex) {
-                                    localFile = oldFiles.get(index);
-                                    data.append(oldFiles.get(index++)).append(",");
-                                } else {
-                                    remoteCluster.scpFrom(downloadFile, outputDataDir);
-                                    String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-                                    localFile = outputDataDir + File.separator + fileName;
-                                    StringBuffer temp = new StringBuffer(data.append(localFile).append(",").toString());
-                                    GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                                }
-                                jobExecutionContext.addOutputFile(localFile);
-                                outputDataObjectType1.setValue(localFile);
-                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                                dataObjectType.setValue(localFile);
-                                dataObjectType.setName(key);
-                                dataObjectType.setType(DataType.URI);
-                                outputArray.add(dataObjectType);
-                            }else if (DataType.STDOUT == outputDataObjectType1.getType()) {
-                                String localFile;
-                                if (index < oldIndex) {
-                                    localFile = oldFiles.get(index);
-                                    data.append(oldFiles.get(index++)).append(",");
-                                } else {
-                                    String fileName = localStdOutFile.getName();
-                                    localFile = outputDataDir + File.separator + fileName;
-                                    StringBuffer temp = new StringBuffer(data.append(localFile).append(",").toString());
-                                    GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                                }
-                                jobExecutionContext.addOutputFile(localFile);
-                                outputDataObjectType1.setValue(localFile);
-                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                                dataObjectType.setValue(localFile);
-                                dataObjectType.setName(key);
-                                dataObjectType.setType(DataType.STDOUT);
-                                outputArray.add(dataObjectType);
-                            }else if (DataType.STDERR == outputDataObjectType1.getType()) {
-                                String localFile;
-                                if (index < oldIndex) {
-                                    localFile = oldFiles.get(index);
-                                    data.append(oldFiles.get(index++)).append(",");
-                                } else {
-                                    String fileName = localStdErrFile.getName();
-                                    localFile = outputDataDir + File.separator + fileName;
-                                    StringBuffer temp = new StringBuffer(data.append(localFile).append(",").toString());
-                                    GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                                }
-                                jobExecutionContext.addOutputFile(localFile);
-                                outputDataObjectType1.setValue(localFile);
-                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
-                                dataObjectType.setValue(localFile);
-                                dataObjectType.setName(key);
-                                dataObjectType.setType(DataType.STDERR);
-                                outputArray.add(dataObjectType);
-                            }
-                        }
-                        break;
-                    } else if(outputList.size() == 1) { //FIXME: this is ultrascan specific
-                        String valueList = outputList.get(0);
-                        String outputFile;
-                        if (index < oldIndex) {
-                            outputFile = oldFiles.get(index);
-                            data.append(oldFiles.get(index++)).append(",");
-                        } else {
-                            remoteCluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
-                            outputFile = outputDataDir + File.separator + valueList;
-                            jobExecutionContext.addOutputFile(outputFile);
-                            StringBuffer temp = new StringBuffer(data.append(outputFile).append(",").toString());
-                            GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                        }
-                        jobExecutionContext.addOutputFile(outputFile);
-                        outputDataObjectType.setValue(outputFile);
-                        OutputDataObjectType dataObjectType  = new OutputDataObjectType();
-                        dataObjectType.setValue(valueList);
-                        dataObjectType.setName(paramName);
-                        dataObjectType.setType(DataType.URI);
-                        outputArray.add(dataObjectType);
-                    }
-                } else {
-                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
-//                    break;
-                }
-            }
-            if (outputArray == null || outputArray.isEmpty()) {
-                if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null){
-                throw new GFacHandlerException(
-                        "Empty output returned from the application. Double-check the application "
-                                + "and ApplicationDescriptor output parameter names"
-                );
-                }
-            }
-            // Why do we set the following?
-            jobExecutionContext.setStandardError(localStdErrFile.getAbsolutePath());
-            jobExecutionContext.setStandardOutput(localStdOutFile.getAbsolutePath());
-            jobExecutionContext.setOutputDir(outputDataDir);
-            status.setTransferState(TransferState.DOWNLOAD);
-            detail.setTransferStatus(status);
-            detail.setTransferDescription(outputDataDir);
-            experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-            experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-            fireTaskOutputChangeEvent(jobExecutionContext, outputArray);
-        } catch (Exception e) {
-            try {
-                status.setTransferState(TransferState.FAILED);
-                detail.setTransferStatus(status);
-                detail.setTransferDescription(e.getLocalizedMessage());
-                experimentCatalog.add(ExpCatChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-                GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-            } catch (Exception e1) {
-                throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-            }
-            throw new GFacHandlerException("Error in retrieving results", e);
-        }
-     }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        this.invoke(jobExecutionContext);
-    }
-}
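
A quick, dependency-free sketch of the recovery-data convention the deleted output handler above appears to use: GFacUtils.saveHandlerData is given the comma-separated list of locally staged files with the number of already-transferred files prepended, and on recovery the first "oldIndex" entries are reused instead of being downloaded again. The exact persisted format (count prefix plus trailing commas) is inferred from the code above, and the class below is purely illustrative, not part of Airavata.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class HandlerRecoveryDataSketch {

        /** Builds e.g. "2/tmp/out/stdout.txt,/tmp/out/result.dat," */
        static String encode(List<String> transferredFiles) {
            StringBuilder files = new StringBuilder();
            for (String f : transferredFiles) {
                files.append(f).append(",");
            }
            return transferredFiles.size() + files.toString();
        }

        /** Returns the files that were already transferred before a failure. */
        static List<String> decode(String saved) {
            int i = 0;
            while (i < saved.length() && Character.isDigit(saved.charAt(i))) {
                i++;
            }
            int count = Integer.parseInt(saved.substring(0, i));
            String[] parts = saved.substring(i).split(",");
            return new ArrayList<>(Arrays.asList(parts).subList(0, count));
        }

        public static void main(String[] args) {
            String saved = encode(Arrays.asList("/tmp/out/stdout.txt", "/tmp/out/result.dat"));
            System.out.println(saved);          // 2/tmp/out/stdout.txt,/tmp/out/result.dat,
            System.out.println(decode(saved));  // [/tmp/out/stdout.txt, /tmp/out/result.dat]
        }
    }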

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java
deleted file mode 100644
index 7000105..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java
+++ /dev/null
@@ -1,83 +0,0 @@
-package org.apache.airavata.gfac.gsissh.handler;
-
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
-import org.apache.airavata.gfac.ssh.util.HandleOutputs;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.registry.cpi.ExpCatChildDataType;
-import org.apache.airavata.registry.cpi.RegistryException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class NewGSISSHOutputHandler extends AbstractHandler{
-	 private static final Logger log = LoggerFactory.getLogger(NewGSISSHOutputHandler.class);
-	  public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-	        super.invoke(jobExecutionContext);
-	        String hostAddress = jobExecutionContext.getHostName();
-	        try {
-	            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-	                GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
-	            }
-	        }  catch (Exception e) {
-	        	 try {
-	  				GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-	  			} catch (GFacException e1) {
-	  				 log.error(e1.getLocalizedMessage());
-	  			}  
-	            log.error(e.getMessage());
-	            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-	        }
-	        RemoteCluster remoteCluster = null;
-	        
-	        try {
-	            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
-	            if (remoteCluster == null) {
-	                GFacUtils.saveErrorDetails(jobExecutionContext, "Security context is not set properly", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-	                
-	                throw new GFacProviderException("Security context is not set properly");
-	            } else {
-	                log.info("Successfully retrieved the Security Context");
-	            }
-	        } catch (Exception e) {
-	            log.error(e.getMessage());
-	            try {
-	                GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-	            } catch (GFacException e1) {
-	                log.error(e1.getLocalizedMessage());
-	            }
-	            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-	        }
-
-	        super.invoke(jobExecutionContext);
-	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, remoteCluster);
-            try {
-				experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-			} catch (RegistryException e) {
-				throw new GFacHandlerException(e);
-			}
-	    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    @Override
-	public void initProperties(Properties properties) throws GFacHandlerException {
-		// TODO Auto-generated method stub
-		
-	}
-
-}
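
One recurring detail in the handlers above: when persisting error details they call e.getCause().toString(), which itself throws a NullPointerException whenever the caught exception has no cause. The helper below is a small, framework-free illustration of a null-safe alternative; the class and method names are made up for this sketch and are not Airavata APIs.

    public final class ErrorDescription {

        private ErrorDescription() {
        }

        /** Prefer the cause, fall back to the exception itself, never dereference null. */
        public static String describe(Throwable t) {
            Throwable root = (t.getCause() != null) ? t.getCause() : t;
            String message = root.getMessage();
            return (message != null)
                    ? root.getClass().getName() + ": " + message
                    : root.toString();
        }

        public static void main(String[] args) {
            // java.lang.IllegalStateException: no cause here
            System.out.println(describe(new IllegalStateException("no cause here")));
            // java.io.IOException: disk full
            System.out.println(describe(new RuntimeException("wrapper",
                    new java.io.IOException("disk full"))));
        }
    }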


[4/7] airavata git commit: Removed gsi related code

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
deleted file mode 100644
index 413b5dc..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
+++ /dev/null
@@ -1,344 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsissh.provider.impl;
-
-import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.apache.airavata.common.exception.AiravataException;
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
-import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
-import org.apache.airavata.gfac.monitor.email.EmailMonitorFactory;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-import org.apache.airavata.model.experiment.CorrectiveAction;
-import org.apache.airavata.model.experiment.ErrorCategory;
-import org.apache.airavata.model.experiment.JobDetails;
-import org.apache.airavata.model.experiment.JobState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.Map;
-
-//import org.apache.airavata.schemas.gfac.GsisshHostType;
-
-public class GSISSHProvider extends AbstractProvider {
-    private static final Logger log = LoggerFactory.getLogger(GSISSHProvider.class);
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-
-    }
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        super.initialize(jobExecutionContext);
-        try {
-            String hostAddress = jobExecutionContext.getHostName();
-            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
-                GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
-            }
-        } catch (ApplicationSettingsException e) {
-            log.error(e.getMessage());
-            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-        } catch (GFacException e) {
-            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-        }
-    }
-
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        log.info("Invoking GSISSH Provider Invoke ...");
-        StringBuffer data = new StringBuffer();
-        ComputeResourceDescription computeResourceDescription = jobExecutionContext.getApplicationContext()
-                .getComputeResourceDescription();
-        ApplicationDeploymentDescription appDeployDesc = jobExecutionContext.getApplicationContext()
-                .getApplicationDeploymentDescription();
-        JobDetails jobDetails = new JobDetails();
-        RemoteCluster remoteCluster = null;
-
-        try {
-            if (jobExecutionContext.getSecurityContext(jobExecutionContext.getHostName()) != null) {
-                remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(jobExecutionContext.getHostName())).getRemoteCluster();
-            }
-            if (remoteCluster == null) {
-                throw new GFacProviderException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-            // This installed path is a mandatory field, because it could change based on the computing resource
-            JobDescriptor jobDescriptor = GFACGSISSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
-            jobDetails.setJobName(jobDescriptor.getJobName());
-
-            log.info(jobDescriptor.toXML());
-            data.append("jobDesc=").append(jobDescriptor.toXML());
-            jobDetails.setJobDescription(jobDescriptor.toXML());
-            String jobID = remoteCluster.submitBatchJob(jobDescriptor);
-            jobExecutionContext.setJobDetails(jobDetails);
-            if (jobID == null) {
-                jobDetails.setJobID("none");
-                GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-            } else {
-                jobDetails.setJobID(jobID.split("\\.")[0]);
-                GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SUBMITTED);
-            }
-            data.append(",jobId=").append(jobDetails.getJobID());
-
-            // Now that the job has been submitted to the resource, it is up to the provider to pass the information to a daemon handler
-            // to perform monitoring; daemon handlers can be accessed from anywhere
-            monitor(jobExecutionContext);
-            // we know this host is type GsiSSHHostType
-        } catch (Exception e) {
-		    String error = "Error submitting the job to host " + computeResourceDescription.getHostName() + " message: " + e.getMessage();
-            log.error(error);
-            jobDetails.setJobID("none");
-            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-            StringWriter errors = new StringWriter();
-            e.printStackTrace(new PrintWriter(errors));
-            GFacUtils.saveErrorDetails(jobExecutionContext,  errors.toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-            throw new GFacProviderException(error, e);
-        } finally {
-            log.info("Saving data for future recovery: ");
-            log.info(data.toString());
-            GFacUtils.saveHandlerData(jobExecutionContext, data, this.getClass().getName());
-        } 
-          
-    }
-
-    public void removeFromMonitorHandlers(JobExecutionContext jobExecutionContext, SSHJobSubmission sshJobSubmission, String jobID) throws GFacHandlerException {
-/*        List<ThreadedHandler> daemonHandlers = BetterGfacImpl.getDaemonHandlers();
-        if (daemonHandlers == null) {
-            daemonHandlers = BetterGfacImpl.getDaemonHandlers();
-        }
-        ThreadedHandler pullMonitorHandler = null;
-        ThreadedHandler pushMonitorHandler = null;
-        MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
-        for (ThreadedHandler threadedHandler : daemonHandlers) {
-            if ("org.apache.airavata.gfac.monitor.handlers.GridPullMonitorHandler".equals(threadedHandler.getClass().getName())) {
-                pullMonitorHandler = threadedHandler;
-                if (monitorMode == null || monitorMode == MonitorMode.POLL_JOB_MANAGER) {
-                    jobExecutionContext.setProperty("cancel","true");
-                    pullMonitorHandler.invoke(jobExecutionContext);
-                } else {
-                    log.error("Currently we only support Pull and Push monitoring and monitorMode should be PULL" +
-                            " to handle by the GridPullMonitorHandler");
-                }
-            } else if ("org.apache.airavata.gfac.monitor.handlers.GridPushMonitorHandler".equals(threadedHandler.getClass().getName())) {
-                pushMonitorHandler = threadedHandler;
-                if ( monitorMode == null || monitorMode == MonitorMode.XSEDE_AMQP_SUBSCRIBE) {
-                    pushMonitorHandler.invoke(jobExecutionContext);
-                } else {
-                    log.error("Currently we only support Pull and Push monitoring and monitorMode should be PUSH" +
-                            " to handle by the GridPushMonitorHandler");
-                }
-            }
-            // have to handle the GridPushMonitorHandler logic
-        }
-        if (pullMonitorHandler == null && pushMonitorHandler == null && ExecutionMode.ASYNCHRONOUS.equals(jobExecutionContext.getGFacConfiguration().getExecutionMode())) {
-            log.error("No Daemon handler is configured in gfac-config.xml, either pull or push, so monitoring will not invoked" +
-                    ", execution is configured as asynchronous, so Outhandler will not be invoked");
-        }*/
-    }
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    public boolean cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
-        //To change body of implemented methods use File | Settings | File Templates.
-        log.info("Cancelling the job in GSISSHProvider");
-        JobDetails jobDetails = jobExecutionContext.getJobDetails();
-        String hostName = jobExecutionContext.getHostName();
-        try {
-            RemoteCluster remoteCluster = null;
-            if (jobExecutionContext.getSecurityContext(hostName) == null) {
-                GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
-            }
-            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostName)).getRemoteCluster();
-            if (remoteCluster == null) {
-                throw new GFacProviderException("Security context is not set properly");
-            } else {
-                log.info("Successfully retrieved the Security Context");
-            }
-            // This installed path is a mandatory field, because it could change based on the computing resource
-            if(jobDetails == null) {
-                log.error("No JobDetails available, so the cancel operation cannot be performed");
-                return false;
-            }
-            if (jobDetails.getJobID() != null) {
-                // if this operation succeeds without any exceptions, we can assume the cancel operation succeeded.
-                remoteCluster.cancelJob(jobDetails.getJobID());
-            } else {
-                log.error("No job ID is set, so the cancel operation cannot be performed");
-                return false;
-            }
-            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
-            return true;
-            // we know this host is type GsiSSHHostType
-        } catch (SSHApiException e) {
-            String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-            log.error(error);
-            jobDetails.setJobID("none");
-            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-            GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-            throw new GFacProviderException(error, e);
-        } catch (Exception e) {
-            String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
-            log.error(error);
-            jobDetails.setJobID("none");
-            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
-            GFacUtils.saveErrorDetails(jobExecutionContext,  e.getCause().toString(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-            throw new GFacProviderException(error, e);
-        }
-    }
-
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
-        // have to implement the logic to recover a gfac failure
-        log.info("Invoking Recovering for the Experiment: " + jobExecutionContext.getExperimentID());
-        ComputeResourceDescription computeResourceDescription = jobExecutionContext.getApplicationContext()
-                .getComputeResourceDescription();
-        String hostName = jobExecutionContext.getHostName();
-        String jobId = "";
-        String jobDesc = "";
-        try {
-            String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
-            String[] split = pluginData.split(",");
-            if (split.length < 2) {
-                try {
-                    this.execute(jobExecutionContext);
-                } catch (GFacException e) {
-                    log.error("Error while  recovering provider", e);
-                    throw new GFacProviderException("Error recovering provider", e);
-                }
-                return;
-            }
-            jobDesc = split[0].substring(7);
-            jobId = split[1].substring(6);
-
-            log.info("The following data have been recovered: ");
-            log.info("Job Description: " + jobDesc);
-            log.info("Job Id: " + jobId);
-            if (jobId == null || "none".equals(jobId) ||
-                    "".equals(jobId)) {
-                try {
-                    this.execute(jobExecutionContext);
-                } catch (GFacException e) {
-                    log.error("Error while  recovering provider", e);
-                    throw new GFacProviderException("Error recovering provider", e);
-                }
-                return;
-            }
-        } catch (Exception e) {
-            log.error("Error while  recovering provider", e);
-        }
-        try {
-            // Now we have enough data to recover
-            JobDetails jobDetails = new JobDetails();
-            jobDetails.setJobDescription(jobDesc);
-            jobDetails.setJobID(jobId);
-            jobExecutionContext.setJobDetails(jobDetails);
-            if (jobExecutionContext.getSecurityContext(hostName) == null) {
-                try {
-                    GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
-                } catch (ApplicationSettingsException e) {
-                    log.error(e.getMessage());
-                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
-                }
-            }
-            monitor(jobExecutionContext);
-        } catch (Exception e) {
-            log.error("Error while recover the job", e);
-            throw new GFacProviderException("Error delegating already ran job to Monitoring", e);
-        }
-    }
-
-    @Override
-    public void monitor(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-        String jobSubmissionInterfaceId = jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId();
-        SSHJobSubmission sshJobSubmission = null;
-        try {
-            sshJobSubmission = jobExecutionContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
-        } catch (AppCatalogException e) {
-            throw new GFacException("Error while reading compute resource", e);
-        }
-        if (jobExecutionContext.getPreferredJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
-            MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
-            if (monitorMode != null && monitorMode == MonitorMode.JOB_EMAIL_NOTIFICATION_MONITOR) {
-                try {
-                    EmailBasedMonitor emailBasedMonitor = EmailMonitorFactory.getEmailBasedMonitor(
-                            sshJobSubmission.getResourceJobManager().getResourceJobManagerType());
-                    emailBasedMonitor.addToJobMonitorMap(jobExecutionContext);
-                } catch (AiravataException e) {
-                    throw new GFacHandlerException("Error while activating email job monitoring ", e);
-                }
-                return;
-            }
-        }
-/*
-        // if email monitoring is not activated or not configured, we use pull or push monitoring
-        List<ThreadedHandler> daemonHandlers = BetterGfacImpl.getDaemonHandlers();
-        if (daemonHandlers == null) {
-            daemonHandlers = BetterGfacImpl.getDaemonHandlers();
-        }
-        ThreadedHandler pullMonitorHandler = null;
-        ThreadedHandler pushMonitorHandler = null;
-        MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
-        String jobID = jobExecutionContext.getJobDetails().getJobID();
-        for (ThreadedHandler threadedHandler : daemonHandlers) {
-            if ("org.apache.airavata.gfac.monitor.handlers.GridPullMonitorHandler".equals(threadedHandler.getClass().getName())) {
-                pullMonitorHandler = threadedHandler;
-                if (monitorMode == null || monitorMode == MonitorMode.POLL_JOB_MANAGER) {
-                    log.info("Job is launched successfully now parsing it to monitoring in pull mode, JobID Returned:  " + jobID);
-                    pullMonitorHandler.invoke(jobExecutionContext);
-                } else {
-                    log.error("Currently we only support Pull and Push monitoring and monitorMode should be PULL" +
-                            " to handle by the GridPullMonitorHandler");
-                }
-            } else if ("org.apache.airavata.gfac.monitor.handlers.GridPushMonitorHandler".equals(threadedHandler.getClass().getName())) {
-                pushMonitorHandler = threadedHandler;
-                if (monitorMode == null || monitorMode == MonitorMode.XSEDE_AMQP_SUBSCRIBE) {
-                    log.info("Job is launched successfully now parsing it to monitoring in push mode, JobID Returned:  " + jobID);
-                    pushMonitorHandler.invoke(jobExecutionContext);
-                } else {
-                    log.error("Currently we only support Pull and Push monitoring and monitorMode should be PUSH" +
-                            " to handle by the GridPushMonitorHandler");
-                }
-            }
-            // have to handle the GridPushMonitorHandler logic
-        }
-        if (pullMonitorHandler == null && pushMonitorHandler == null && ExecutionMode.ASYNCHRONOUS.equals(jobExecutionContext.getGFacConfiguration().getExecutionMode())) {
-            log.error("No Daemon handler is configured in gfac-config.xml, either pull or push, so monitoring will not invoked" +
-                    ", execution is configured as asynchronous, so Outhandler will not be invoked");
-
-        }*/
-    }
-}
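
For reference, a framework-free sketch of the "jobDesc=<xml>,jobId=<id>" recovery string that execute() above writes via GFacUtils.saveHandlerData and recover() parses back. The original takes the first two comma-delimited tokens, which can break if the job description XML itself contains commas; this sketch anchors on the last ",jobId=" marker instead. The class name and parsing strategy are illustrative only, not the original implementation.

    public final class GsisshRecoveryData {

        private static final String JOB_DESC_PREFIX = "jobDesc=";
        private static final String JOB_ID_MARKER = ",jobId=";

        private final String jobDescriptorXml;
        private final String jobId;

        private GsisshRecoveryData(String jobDescriptorXml, String jobId) {
            this.jobDescriptorXml = jobDescriptorXml;
            this.jobId = jobId;
        }

        public static String encode(String jobDescriptorXml, String jobId) {
            return JOB_DESC_PREFIX + jobDescriptorXml + JOB_ID_MARKER + jobId;
        }

        public static GsisshRecoveryData decode(String pluginData) {
            int marker = pluginData.lastIndexOf(JOB_ID_MARKER);
            if (!pluginData.startsWith(JOB_DESC_PREFIX) || marker < 0) {
                throw new IllegalArgumentException("Not enough data to recover: " + pluginData);
            }
            String desc = pluginData.substring(JOB_DESC_PREFIX.length(), marker);
            String id = pluginData.substring(marker + JOB_ID_MARKER.length());
            return new GsisshRecoveryData(desc, id);
        }

        public String getJobDescriptorXml() { return jobDescriptorXml; }

        public String getJobId() { return jobId; }

        public static void main(String[] args) {
            GsisshRecoveryData data = decode(encode("<job><name>A123</name></job>", "4242"));
            System.out.println(data.getJobId());            // 4242
            System.out.println(data.getJobDescriptorXml()); // <job><name>A123</name></job>
        }
    }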

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java
deleted file mode 100644
index 85216b4..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsissh.security;
-
-import org.apache.airavata.credential.store.store.CredentialReader;
-import org.apache.airavata.gfac.core.context.AbstractSecurityContext;
-import org.apache.airavata.gfac.core.RequestData;
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Handles GRID related security.
- */
-public class GSISecurityContext extends AbstractSecurityContext {
-
-    protected static final Logger log = LoggerFactory.getLogger(GSISecurityContext.class);
-    /*
-     * context name
-     */
-
-    private RemoteCluster remoteCluster = null;
-
-
-    public GSISecurityContext(CredentialReader credentialReader, RequestData requestData, RemoteCluster remoteCluster) {
-        super(credentialReader, requestData);
-        this.remoteCluster = remoteCluster;
-    }
-
-
-    public GSISecurityContext(CredentialReader credentialReader, RequestData requestData) {
-        super(credentialReader, requestData);
-    }
-
-
-    public GSISecurityContext(RemoteCluster remoteCluster) {
-        this.setRemoteCluster(remoteCluster);
-    }
-
-
-
-    public RemoteCluster getRemoteCluster() {
-        return remoteCluster;
-    }
-
-    public void setRemoteCluster(RemoteCluster remoteCluster) {
-        this.remoteCluster = remoteCluster;
-    }
-}
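
GSISecurityContext is essentially a per-host holder for a RemoteCluster, looked up through jobExecutionContext.getSecurityContext(hostName) and registered with addSecurityContext(hostName, context) elsewhere in this commit. The snippet below is a minimal, stand-alone illustration of that "one security context per host" pattern using plain JDK types; the registry class and its SecurityContext stand-in are hypothetical, not Airavata classes.

    import java.util.HashMap;
    import java.util.Map;

    public class SecurityContextRegistrySketch {

        /** Stand-in for the real context, which wraps a RemoteCluster. */
        static class SecurityContext {
            final String description;

            SecurityContext(String description) {
                this.description = description;
            }
        }

        private final Map<String, SecurityContext> contextsByHost = new HashMap<>();

        void addSecurityContext(String hostName, SecurityContext context) {
            contextsByHost.put(hostName, context);
        }

        SecurityContext getSecurityContext(String hostName) {
            return contextsByHost.get(hostName); // null means "not set yet"
        }

        public static void main(String[] args) {
            SecurityContextRegistrySketch registry = new SecurityContextRegistrySketch();
            String host = "example-cluster.org";
            if (registry.getSecurityContext(host) == null) {
                registry.addSecurityContext(host, new SecurityContext("GSI context for " + host));
            }
            System.out.println(registry.getSecurityContext(host).description);
        }
    }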

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/TokenizedMyProxyAuthInfo.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/TokenizedMyProxyAuthInfo.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/TokenizedMyProxyAuthInfo.java
deleted file mode 100644
index 36cb84f..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/TokenizedMyProxyAuthInfo.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsissh.security;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.credential.store.credential.Credential;
-import org.apache.airavata.credential.store.credential.impl.certificate.CertificateCredential;
-import org.apache.airavata.credential.store.store.CredentialReader;
-import org.apache.airavata.gfac.core.GFacConstants;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.RequestData;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
-import org.globus.gsi.X509Credential;
-import org.globus.gsi.gssapi.GlobusGSSCredentialImpl;
-import org.globus.gsi.provider.GlobusProvider;
-import org.globus.myproxy.GetParams;
-import org.globus.myproxy.MyProxy;
-import org.globus.myproxy.MyProxyException;
-import org.gridforum.jgss.ExtendedGSSCredential;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.security.Security;
-import java.security.cert.X509Certificate;
-
-public class TokenizedMyProxyAuthInfo extends GSIAuthenticationInfo {
-    protected static final Logger log = LoggerFactory.getLogger(TokenizedMyProxyAuthInfo.class);
-
-    public static int CREDENTIAL_RENEWING_THRESH_HOLD = 10 * 90;
-
-    private GSSCredential gssCredentials = null;
-
-
-    private CredentialReader credentialReader;
-
-    private RequestData requestData;
-
-    public static final String X509_CERT_DIR = "X509_CERT_DIR";
-
-
-    static {
-        Security.addProvider(new GlobusProvider());
-        try {
-            setUpTrustedCertificatePath();
-        } catch (ApplicationSettingsException e) {
-            log.error(e.getLocalizedMessage(), e);
-        }
-    }
-
-    public static void setUpTrustedCertificatePath(String trustedCertificatePath) {
-
-        File file = new File(trustedCertificatePath);
-
-        if (!file.exists() || !file.canRead()) {
-            File f = new File(".");
-            log.info("Current directory " + f.getAbsolutePath());
-            throw new RuntimeException("Cannot read trusted certificate path " + trustedCertificatePath);
-        } else {
-            System.setProperty(GFacConstants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY, file.getAbsolutePath());
-        }
-    }
-
-    private static void setUpTrustedCertificatePath() throws ApplicationSettingsException {
-
-        String trustedCertificatePath = ServerSettings.getSetting(GFacConstants.TRUSTED_CERT_LOCATION);
-
-        setUpTrustedCertificatePath(trustedCertificatePath);
-    }
-
-    public TokenizedMyProxyAuthInfo(CredentialReader credentialReader, RequestData requestData) {
-        this.credentialReader = credentialReader;
-        this.requestData = requestData;
-        try {
-            properties.setProperty(X509_CERT_DIR, ServerSettings.getSetting(GFacConstants.TRUSTED_CERT_LOCATION));
-        } catch (ApplicationSettingsException e) {
-            log.error("Error while  reading server properties", e);
-        };
-    }
-
-    public TokenizedMyProxyAuthInfo(RequestData requestData) {
-           this.requestData = requestData;
-           try {
-               properties.setProperty(X509_CERT_DIR, ServerSettings.getSetting(GFacConstants.TRUSTED_CERT_LOCATION));
-           } catch (ApplicationSettingsException e) {
-               log.error("Error while  reading server properties", e);
-           };
-       }
-
-    public GSSCredential getCredentials() throws SecurityException {
-
-        if (gssCredentials == null) {
-
-            try {
-                gssCredentials = getCredentialsFromStore();
-            } catch (Exception e) {
-                log.error("An exception occurred while retrieving credentials from the credential store. " +
-                        "Will continue with my proxy user name and password. Provided TokenId:" + requestData.getTokenId(), e);
-            }
-
-            if (gssCredentials == null) {
-                System.out.println("Authenticating with the provided token failed, so falling back to the default credentials");
-                try {
-                    gssCredentials = getDefaultCredentials();
-                } catch (Exception e) {
-                    throw new SecurityException("Error retrieving my proxy using username password");
-                }
-            }
-            // if still null, throw an exception
-            if (gssCredentials == null) {
-                throw new SecurityException("Unable to retrieve my proxy credentials to continue operation.");
-            }
-        } else {
-            try {
-                if (gssCredentials.getRemainingLifetime() < CREDENTIAL_RENEWING_THRESH_HOLD) {
-                    try {
-                        return renewCredentials();
-                    } catch (Exception e) {
-                        throw new SecurityException("Error renewing credentials", e);
-                    }
-                }
-            } catch (GSSException e) {
-                throw new SecurityException("Unable to retrieve remaining life time from credentials.", e);
-            }
-        }
-
-        return gssCredentials;
-    }
-
-
-    /**
-     * Reads the credentials from credential store.
-     *
-     * @return If token is found in the credential store, will return a valid credential. Else returns null.
-     * @throws Exception If an error occurred while retrieving credentials.
-     */
-    public GSSCredential getCredentialsFromStore() throws Exception {
-
-        if (getCredentialReader() == null) {
-        	credentialReader = GFacUtils.getCredentialReader();
-        	if(credentialReader == null){
-        		return null;
-        	}
-        }
-
-        Credential credential = getCredentialReader().getCredential(getRequestData().getGatewayId(),
-                getRequestData().getTokenId());
-
-        if (credential != null) {
-            if (credential instanceof CertificateCredential) {
-
-                log.info("Successfully found credentials for token id - " + getRequestData().getTokenId() +
-                        " gateway id - " + getRequestData().getGatewayId());
-
-                CertificateCredential certificateCredential = (CertificateCredential) credential;
-
-                X509Certificate[] certificates = certificateCredential.getCertificates();
-                X509Credential newCredential = new X509Credential(certificateCredential.getPrivateKey(), certificates);
-
-                GlobusGSSCredentialImpl cred = new GlobusGSSCredentialImpl(newCredential, GSSCredential.INITIATE_AND_ACCEPT);
-                System.out.print(cred.export(ExtendedGSSCredential.IMPEXP_OPAQUE));
-                return cred;
-                //return new GlobusGSSCredentialImpl(newCredential,
-                //        GSSCredential.INITIATE_AND_ACCEPT);
-            } else {
-                log.info("Credential type is not CertificateCredential. Cannot map it to Globus credentials. " +
-                        "Credential type - " + credential.getClass().getName());
-            }
-        } else {
-            log.info("Could not find credentials for token - " + getRequestData().getTokenId() + " and "
-                    + "gateway id - " + getRequestData().getGatewayId());
-        }
-
-        return null;
-    }
-
-    /**
-     * Renew GSSCredentials.
-     * Before executing, we need to add the current host as a trusted renewer. Note that to renew credentials
-     * we don't need a user name and password.
-     * To do that, execute the following command:
-     * > myproxy-logon -t <LIFETIME> -s <MY PROXY SERVER> -l <USER NAME>
-     * E.g :- > myproxy-logon -t 264 -s myproxy.teragrid.org -l us3
-     * Enter MyProxy pass phrase:
-     * A credential has been received for user us3 in /tmp/x509up_u501.
-     * > myproxy-init -A --cert /tmp/x509up_u501 --key /tmp/x509up_u501 -l ogce -s myproxy.teragrid.org
-     *
-     * @return Renewed credentials.
-     * @throws GFacException                            If an error occurred while renewing credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential renewCredentialsAsATrustedHost() throws GFacException, ApplicationSettingsException {
-        MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
-        GetParams getParams = new GetParams();
-        getParams.setAuthzCreds(gssCredentials);
-        getParams.setUserName(getRequestData().getMyProxyUserName());
-        getParams.setLifetime(getRequestData().getMyProxyLifeTime());
-        try {
-            return myproxy.get(gssCredentials, getParams);
-        } catch (MyProxyException e) {
-            throw new GFacException("An error occurred while renewing security credentials.", e);
-        }
-    }
-
-
-    /**
-     * Gets the default proxy certificate.
-     *
-     * @return Default my proxy credentials.
-     * @throws GFacException                            If an error occurred while retrieving credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential getDefaultCredentials() throws GFacException, ApplicationSettingsException {
-        MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
-        try {
-            return myproxy.get(getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
-                    getRequestData().getMyProxyLifeTime());
-        } catch (MyProxyException e) {
-            throw new GFacException("An error occurred while retrieving default security credentials.", e);
-        }
-    }
-
-
-    /**
-     * Renews credentials. First try to renew credentials as a trusted renewer. If that fails,
-     * use user name and password to renew credentials.
-     *
-     * @return Renewed credentials.
-     * @throws GFacException                            If an error occurred while renewing credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential renewCredentials() throws GFacException, ApplicationSettingsException {
-
-        // First try to renew credentials as a trusted renewer
-        try {
-            gssCredentials = renewCredentialsAsATrustedHost();
-        } catch (Exception e) {
-            log.warn("Renewing credentials as a trusted renewer failed", e);
-            gssCredentials = getDefaultCredentials();
-        }
-
-        return gssCredentials;
-    }
-
-    /**
-     * Gets a new proxy certificate given current credentials.
-     *
-     * @return The short lived GSSCredentials
-     * @throws GFacException                            If an error occurred while retrieving credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential getProxyCredentials() throws GFacException, ApplicationSettingsException {
-
-        MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
-        try {
-            return myproxy.get(gssCredentials, getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
-                    getRequestData().getMyProxyLifeTime());
-        } catch (MyProxyException e) {
-            throw new GFacException("An error occurred while renewing security credentials using user/password.", e);
-        }
-    }
-
-    public void setGssCredentials(GSSCredential gssCredentials) {
-        this.gssCredentials = gssCredentials;
-    }
-
-    public CredentialReader getCredentialReader() {
-        return credentialReader;
-    }
-
-    public void setCredentialReader(CredentialReader credentialReader) {
-        this.credentialReader = credentialReader;
-    }
-
-    public RequestData getRequestData() {
-        return requestData;
-    }
-
-    public void setRequestData(RequestData requestData) {
-        this.requestData = requestData;
-    }
-}
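
The getCredentials() logic above implements a fallback chain: reuse cached GSS credentials, renew them when the remaining lifetime drops under CREDENTIAL_RENEWING_THRESH_HOLD, otherwise try the credential store first and fall back to the default MyProxy user/password credentials. Below is a plain-JDK sketch of that chain with illustrative placeholder types; none of these names are Airavata or JGlobus APIs, and treating the 10 * 90 threshold as seconds is an assumption.

    import java.util.Arrays;
    import java.util.List;
    import java.util.Optional;

    public class CredentialFallbackSketch {

        /** Placeholder for "something that may be able to hand us a credential". */
        interface CredentialSource {
            Optional<String> fetch();
        }

        // Mirrors CREDENTIAL_RENEWING_THRESH_HOLD = 10 * 90 above (assumed to be seconds).
        private static final int RENEW_THRESHOLD_SECONDS = 10 * 90;

        /** Walks the sources in order and returns the first credential found. */
        static String obtain(List<CredentialSource> sources) {
            for (CredentialSource source : sources) {
                Optional<String> credential = source.fetch();
                if (credential.isPresent()) {
                    return credential.get();
                }
            }
            throw new SecurityException("Unable to retrieve credentials from any source");
        }

        static boolean needsRenewal(int remainingLifetimeSeconds) {
            return remainingLifetimeSeconds < RENEW_THRESHOLD_SECONDS;
        }

        public static void main(String[] args) {
            CredentialSource store = Optional::empty;                   // credential store lookup failed
            CredentialSource defaults = () -> Optional.of("proxy-abc"); // MyProxy user/password succeeded
            System.out.println(obtain(Arrays.asList(store, defaults))); // proxy-abc
            System.out.println(needsRenewal(120));                      // true
        }
    }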

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
deleted file mode 100644
index 89e3571..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ /dev/null
@@ -1,367 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsissh.util;
-
-import org.apache.airavata.gfac.core.cluster.RemoteCluster;
-import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
-import org.apache.airavata.registry.cpi.AppCatalog;
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.credential.store.credential.impl.certificate.CertificateCredential;
-import org.apache.airavata.credential.store.store.CredentialReader;
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.RequestData;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gfac.gsissh.security.TokenizedMyProxyAuthInfo;
-import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.gsi.ssh.impl.GSISSHAbstractCluster;
-import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
-import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.appcatalog.computeresource.*;
-import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
-import org.apache.airavata.model.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.util.*;
-
-
-public class GFACGSISSHUtils {
-    private final static Logger logger = LoggerFactory.getLogger(GFACGSISSHUtils.class);
-
-    public static final String PBS_JOB_MANAGER = "pbs";
-    public static final String SLURM_JOB_MANAGER = "slurm";
-    public static final String SUN_GRID_ENGINE_JOB_MANAGER = "UGE";
-    public static final String LSF_JOB_MANAGER = "lsf";
-
-    public static int maxClusterCount = 5;
-    public static Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();
-
-    public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
-        JobSubmissionInterface jobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
-        JobSubmissionProtocol jobProtocol = jobSubmissionInterface.getJobSubmissionProtocol();
-        try {
-            AppCatalog appCatalog = jobExecutionContext.getAppCatalog();
-            SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
-            if (jobProtocol == JobSubmissionProtocol.GLOBUS || jobProtocol == JobSubmissionProtocol.UNICORE
-                    || jobProtocol == JobSubmissionProtocol.CLOUD || jobProtocol == JobSubmissionProtocol.LOCAL) {
-                logger.error("This is the wrong method to invoke for non-SSH host types, please check your gfac-config.xml");
-            } else if (jobProtocol == JobSubmissionProtocol.SSH && sshJobSubmission.getSecurityProtocol() == SecurityProtocol.GSI) {
-                String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
-                RequestData requestData = new RequestData(jobExecutionContext.getGatewayID());
-                requestData.setTokenId(credentialStoreToken);
-                HPCRemoteCluster HPCRemoteCluster = null;
-                GSISecurityContext context = null;
-
-                TokenizedMyProxyAuthInfo tokenizedMyProxyAuthInfo = new TokenizedMyProxyAuthInfo(requestData);
-                CredentialReader credentialReader = GFacUtils.getCredentialReader();
-                if (credentialReader != null) {
-                    CertificateCredential credential = null;
-                    try {
-                        credential = (CertificateCredential) credentialReader.getCredential(jobExecutionContext.getGatewayID(), credentialStoreToken);
-                        requestData.setMyProxyUserName(credential.getCommunityUser().getUserName());
-                    } catch (Exception e) {
-                        logger.error(e.getLocalizedMessage());
-                    }
-                }
-
-                String key = requestData.getMyProxyUserName() + jobExecutionContext.getHostName()+
-                        sshJobSubmission.getSshPort();
-                boolean recreate = false;
-                synchronized (clusters) {
-                    if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-                        recreate = true;
-                    } else if (clusters.containsKey(key)) {
-                        int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-                        if (clusters.get(key).get(i).getSession().isConnected()) {
-                            HPCRemoteCluster = (HPCRemoteCluster) clusters.get(key).get(i);
-                        } else {
-                            clusters.get(key).remove(i);
-                            recreate = true;
-                        }
-                        if (!recreate) {
-                            try {
-                                HPCRemoteCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try to connect; if it works we are good, else we recreate
-                            } catch (Exception e) {
-                                clusters.get(key).remove(i);
-                                logger.info("Connection found in the connection map has expired, so we create a new one from scratch");
-                                maxClusterCount++;
-                                recreate = true; // we recreate the HPCRemoteCluster if there is any exception during connection
-                            }
-                            logger.info("Re-using the same connection used with the connection string:" + key);
-                            context = new GSISecurityContext(tokenizedMyProxyAuthInfo.getCredentialReader(), requestData, HPCRemoteCluster);
-                        }
-                    } else {
-                        recreate = true;
-                    }
-
-                    if (recreate) {
-                        ServerInfo serverInfo = new ServerInfo(requestData.getMyProxyUserName(), jobExecutionContext.getHostName(),
-                                sshJobSubmission.getSshPort());
-
-                        JobManagerConfiguration jConfig = null;
-                        String installedParentPath = sshJobSubmission.getResourceJobManager().getJobManagerBinPath();
-                        String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
-                        if (jobManager == null) {
-                            logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
-                            jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-                        } else {
-                            if (PBS_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-                                jConfig = CommonUtils.getPBSJobManager(installedParentPath);
-                            } else if (SLURM_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-                                jConfig = CommonUtils.getSLURMJobManager(installedParentPath);
-                            } else if (SUN_GRID_ENGINE_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-                                jConfig = CommonUtils.getUGEJobManager(installedParentPath);
-                            }else if(LSF_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
-                                jConfig = CommonUtils.getLSFJobManager(installedParentPath);
-                            }
-                        }
-                        HPCRemoteCluster = new HPCRemoteCluster(serverInfo, tokenizedMyProxyAuthInfo, jConfig);
-                        context = new GSISecurityContext(tokenizedMyProxyAuthInfo.getCredentialReader(), requestData, HPCRemoteCluster);
-                        List<RemoteCluster> pbsRemoteClusters = null;
-                        if (!(clusters.containsKey(key))) {
-                            pbsRemoteClusters = new ArrayList<RemoteCluster>();
-                        } else {
-                            pbsRemoteClusters = clusters.get(key);
-                        }
-                        pbsRemoteClusters.add(HPCRemoteCluster);
-                        clusters.put(key, pbsRemoteClusters);
-                    }
-                }
-
-                jobExecutionContext.addSecurityContext(jobExecutionContext.getHostName(), context);
-            }
-        } catch (Exception e) {
-            throw new GFacException("An error occurred while creating GSI security context", e);
-        }
-    }
-
-    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) {
-        JobDescriptor jobDescriptor = new JobDescriptor();
-        TaskDetails taskData = jobExecutionContext.getTaskData();
-        ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
-        try {
-			if(ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_ENABLE).equalsIgnoreCase("true")){
-				jobDescriptor.setMailOptions(ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_FLAGS));
-				String emailids = ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_EMAILIDS);
-
-				if(jobExecutionContext.getTaskData().isSetEmailAddresses()){
-					List<String> emailList = jobExecutionContext.getTaskData().getEmailAddresses();
-					String elist = GFacUtils.listToCsv(emailList, ',');
-					if(emailids != null && !emailids.isEmpty()){
-						emailids = emailids +"," + elist;
-					}else{
-						emailids = elist;
-					}
-				}
-				if(emailids != null && !emailids.isEmpty()){
-					logger.info("Email list: "+ emailids);
-					jobDescriptor.setMailAddress(emailids);
-				}
-			}
-		} catch (ApplicationSettingsException e) {
-			 logger.error("ApplicationSettingsException : " +e.getLocalizedMessage());
-		}
-        // this is common for any application descriptor
-        jobDescriptor.setCallBackIp(ServerSettings.getIp());
-        jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
-        jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
-        jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
-        jobDescriptor.setExecutablePath(jobExecutionContext.getExecutablePath());
-        jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
-        jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
-        String computationalProjectAccount = taskData.getTaskScheduling().getComputationalProjectAccount();
-        taskData.getEmailAddresses();
-        if (computationalProjectAccount == null){
-            ComputeResourcePreference computeResourcePreference = jobExecutionContext.getApplicationContext().getComputeResourcePreference();
-            if (computeResourcePreference != null) {
-                computationalProjectAccount = computeResourcePreference.getAllocationProjectNumber();
-            }
-        }
-        if (computationalProjectAccount != null) {
-            jobDescriptor.setAcountString(computationalProjectAccount);
-        }
-
-        Random random = new Random();
-        int i = random.nextInt(Integer.MAX_VALUE); // We always set the job name
-        jobDescriptor.setJobName("A" + String.valueOf(i+99999999));
-        jobDescriptor.setWorkingDirectory(jobExecutionContext.getWorkingDir());
-
-        List<String> inputValues = new ArrayList<String>();
-        MessageContext input = jobExecutionContext.getInMessageContext();
-        // sort the inputs first and then build the command List
-        Comparator<InputDataObjectType> inputOrderComparator = new Comparator<InputDataObjectType>() {
-            @Override
-            public int compare(InputDataObjectType inputDataObjectType, InputDataObjectType t1) {
-                return inputDataObjectType.getInputOrder() - t1.getInputOrder();
-            }
-        };
-        Set<InputDataObjectType> sortedInputSet = new TreeSet<InputDataObjectType>(inputOrderComparator);
-        for (Object object : input.getParameters().values()) {
-            if (object instanceof InputDataObjectType) {
-                InputDataObjectType inputDOT = (InputDataObjectType) object;
-                sortedInputSet.add(inputDOT);
-            }
-        }
-        for (InputDataObjectType inputDataObjectType : sortedInputSet) {
-            if (!inputDataObjectType.isRequiredToAddedToCommandLine()) {
-                continue;
-            }
-            if (inputDataObjectType.getApplicationArgument() != null
-                    && !inputDataObjectType.getApplicationArgument().equals("")) {
-                inputValues.add(inputDataObjectType.getApplicationArgument());
-            }
-
-            if (inputDataObjectType.getValue() != null
-                    && !inputDataObjectType.getValue().equals("")) {
-                if (inputDataObjectType.getType() == DataType.URI) {
-                    // set only the relative path
-                    String filePath = inputDataObjectType.getValue();
-                    filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
-                    inputValues.add(filePath);
-                }else {
-                    inputValues.add(inputDataObjectType.getValue());
-                }
-
-            }
-        }
-
-        Map<String, Object> outputParams = jobExecutionContext.getOutMessageContext().getParameters();
-        for (Object outputParam : outputParams.values()) {
-            if (outputParam instanceof OutputDataObjectType) {
-                OutputDataObjectType output = (OutputDataObjectType) outputParam;
-                if (output.getApplicationArgument() != null
-                        && !output.getApplicationArgument().equals("")) {
-                    inputValues.add(output.getApplicationArgument());
-                }
-                if (output.getValue() != null && !output.getValue().equals("") && output.isRequiredToAddedToCommandLine()) {
-                    if (output.getType() == DataType.URI){
-                        String filePath = output.getValue();
-                        filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
-                        inputValues.add(filePath);
-                    }
-                }
-            }
-        }
-        jobDescriptor.setInputValues(inputValues);
-
-        jobDescriptor.setUserName(((GSISSHAbstractCluster) remoteCluster).getServerInfo().getUserName());
-        jobDescriptor.setShellName("/bin/bash");
-        jobDescriptor.setAllEnvExport(true);
-        jobDescriptor.setOwner(((HPCRemoteCluster) remoteCluster).getServerInfo().getUserName());
-
-        ComputationalResourceScheduling taskScheduling = taskData.getTaskScheduling();
-        if (taskScheduling != null) {
-            int totalNodeCount = taskScheduling.getNodeCount();
-            int totalCPUCount = taskScheduling.getTotalCPUCount();
-
-//        jobDescriptor.setJobSubmitter(applicationDeploymentType.getJobSubmitterCommand());
-            if (taskScheduling.getComputationalProjectAccount() != null) {
-                jobDescriptor.setAcountString(taskScheduling.getComputationalProjectAccount());
-            }
-            if (taskScheduling.getQueueName() != null) {
-                jobDescriptor.setQueueName(taskScheduling.getQueueName());
-            }
-
-            if (totalNodeCount > 0) {
-                jobDescriptor.setNodes(totalNodeCount);
-            }
-            if (taskScheduling.getComputationalProjectAccount() != null) {
-                jobDescriptor.setAcountString(taskScheduling.getComputationalProjectAccount());
-            }
-            if (taskScheduling.getQueueName() != null) {
-                jobDescriptor.setQueueName(taskScheduling.getQueueName());
-            }
-            if (totalCPUCount > 0) {
-                int ppn = totalCPUCount / totalNodeCount;
-                jobDescriptor.setProcessesPerNode(ppn);
-                jobDescriptor.setCPUCount(totalCPUCount);
-            }
-            if (taskScheduling.getWallTimeLimit() > 0) {
-                jobDescriptor.setMaxWallTime(String.valueOf(taskScheduling.getWallTimeLimit()));
-                if(resourceJobManager.getResourceJobManagerType().equals(ResourceJobManagerType.LSF)){
-                    jobDescriptor.setMaxWallTimeForLSF(String.valueOf(taskScheduling.getWallTimeLimit()));
-                }
-            }
-
-            if (taskScheduling.getTotalPhysicalMemory() > 0) {
-                jobDescriptor.setUsedMemory(taskScheduling.getTotalPhysicalMemory() + "");
-            }
-        } else {
-            logger.error("Task scheduling cannot be null at this point..");
-        }
-
-        ApplicationDeploymentDescription appDepDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        List<String> moduleCmds = appDepDescription.getModuleLoadCmds();
-        if (moduleCmds != null) {
-            for (String moduleCmd : moduleCmds) {
-                jobDescriptor.addModuleLoadCommands(moduleCmd);
-            }
-        }
-        List<String> preJobCommands = appDepDescription.getPreJobCommands();
-        if (preJobCommands != null) {
-            for (String preJobCommand : preJobCommands) {
-                jobDescriptor.addPreJobCommand(parseCommand(preJobCommand, jobExecutionContext));
-            }
-        }
-
-        List<String> postJobCommands = appDepDescription.getPostJobCommands();
-        if (postJobCommands != null) {
-            for (String postJobCommand : postJobCommands) {
-                jobDescriptor.addPostJobCommand(parseCommand(postJobCommand, jobExecutionContext));
-            }
-        }
-
-        ApplicationParallelismType parallelism = appDepDescription.getParallelism();
-        if (parallelism != null){
-            if (parallelism == ApplicationParallelismType.MPI || parallelism == ApplicationParallelismType.OPENMP || parallelism == ApplicationParallelismType.OPENMP_MPI){
-                Map<JobManagerCommand, String> jobManagerCommands = resourceJobManager.getJobManagerCommands();
-                if (jobManagerCommands != null && !jobManagerCommands.isEmpty()) {
-                    for (JobManagerCommand command : jobManagerCommands.keySet()) {
-                        if (command == JobManagerCommand.SUBMISSION) {
-                            String commandVal = jobManagerCommands.get(command);
-                            jobDescriptor.setJobSubmitter(commandVal);
-                        }
-                    }
-                }
-            }
-        }
-        return jobDescriptor;
-    }
-
-    private static String parseCommand(String value, JobExecutionContext jobExecutionContext) {
-        String parsedValue = value.replaceAll("\\$workingDir", jobExecutionContext.getWorkingDir());
-        parsedValue = parsedValue.replaceAll("\\$inputDir", jobExecutionContext.getInputDir());
-        parsedValue = parsedValue.replaceAll("\\$outputDir", jobExecutionContext.getOutputDir());
-        return parsedValue;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java
index 5ab2e96..a0d3a9b 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java
@@ -21,13 +21,27 @@
 package org.apache.airavata.gfac.impl;
 
 import com.google.common.eventbus.EventBus;
+import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.LocalEventPublisher;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.gfac.core.GFacEngine;
 import org.apache.airavata.gfac.core.GFacException;
+import org.apache.airavata.gfac.core.JobManagerConfiguration;
 import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.cluster.ServerInfo;
+import org.apache.airavata.gfac.core.monitor.JobMonitor;
+import org.apache.airavata.gfac.impl.job.LSFJobConfiguration;
+import org.apache.airavata.gfac.impl.job.LSFOutputParser;
+import org.apache.airavata.gfac.impl.job.PBSJobConfiguration;
+import org.apache.airavata.gfac.impl.job.PBSOutputParser;
+import org.apache.airavata.gfac.impl.job.SlurmJobConfiguration;
+import org.apache.airavata.gfac.impl.job.SlurmOutputParser;
+import org.apache.airavata.gfac.impl.job.UGEJobConfiguration;
+import org.apache.airavata.gfac.impl.job.UGEOutputParser;
+import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
+import org.apache.airavata.gfac.monitor.email.EmailMonitorFactory;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
 import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.AppCatalogException;
@@ -38,6 +52,10 @@ import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.ExponentialBackoffRetry;
 
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
 import java.util.Map;
 
 public abstract class Factory {
@@ -46,6 +64,8 @@ public abstract class Factory {
 	private static Map<String, RemoteCluster> remoteClusterMap;
 	private static LocalEventPublisher localEventPublisher;
 	private static CuratorFramework curatorClient;
+	private static EmailBasedMonitor emailBasedMonitor;
+	private static Date startMonitorDate = Calendar.getInstance().getTime();
 
 	public static GFacEngine getGFacEngine() throws GFacException {
 		if (engine == null) {
@@ -93,4 +113,37 @@ public abstract class Factory {
 		}
 		return curatorClient;
 	}
+
+	public static JobMonitor getJobMonitor(ResourceJobManagerType resourceJobManagerType) throws AiravataException {
+		if (resourceJobManagerType == ResourceJobManagerType.FORK) {
+			return null; // TODO write a job monitor for this.
+		} else {
+			if (emailBasedMonitor == null) {
+				synchronized (EmailMonitorFactory.class){
+					if (emailBasedMonitor == null) {
+						emailBasedMonitor = new EmailBasedMonitor(resourceJobManagerType);
+						emailBasedMonitor.setDate(startMonitorDate);
+						new Thread(emailBasedMonitor).start();
+					}
+				}
+			}
+			return emailBasedMonitor;
+		}
+	}
+
+	public static JobManagerConfiguration getPBSJobManager(String installedPath) {
+		return new PBSJobConfiguration("PBSTemplate.xslt",".pbs", installedPath, new PBSOutputParser());
+	}
+
+	public static JobManagerConfiguration getSLURMJobManager(String installedPath) {
+		return new SlurmJobConfiguration("SLURMTemplate.xslt", ".slurm", installedPath, new SlurmOutputParser());
+	}
+
+	public static JobManagerConfiguration getUGEJobManager(String installedPath) {
+		return new UGEJobConfiguration("UGETemplate.xslt", ".pbs", installedPath, new UGEOutputParser());
+	}
+
+	public static JobManagerConfiguration getLSFJobManager(String installedPath) {
+		return new LSFJobConfiguration("LSFTemplate.xslt", ".lsf", installedPath, new LSFOutputParser());
+	}
 }

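The new Factory helpers above take over the job-manager selection that the removed GSI code performed with CommonUtils and string comparisons. A minimal sketch of how a caller might map a ResourceJobManagerType onto these helpers, assuming the enum declares PBS, SLURM, UGE and LSF constants (only FORK and LSF are visible in this diff):

    // Illustrative sketch only; mirrors the removed string-based selection and falls back to PBS.
    import org.apache.airavata.gfac.core.JobManagerConfiguration;
    import org.apache.airavata.gfac.impl.Factory;
    import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;

    public class JobManagerSelectionSketch {
        public static JobManagerConfiguration select(ResourceJobManagerType type, String installedPath) {
            switch (type) {
                case SLURM:
                    return Factory.getSLURMJobManager(installedPath);
                case UGE:
                    return Factory.getUGEJobManager(installedPath);
                case LSF:
                    return Factory.getLSFJobManager(installedPath);
                case PBS:
                default:
                    // the removed code also defaulted to PBS when no job manager was configured
                    return Factory.getPBSJobManager(installedPath);
            }
        }
    }
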
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/HPCRemoteCluster.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/HPCRemoteCluster.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/HPCRemoteCluster.java
new file mode 100644
index 0000000..645cb30
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/HPCRemoteCluster.java
@@ -0,0 +1,332 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl;
+
+import com.jcraft.jsch.ChannelExec;
+import com.jcraft.jsch.JSch;
+import com.jcraft.jsch.JSchException;
+import com.jcraft.jsch.Session;
+import com.jcraft.jsch.UserInfo;
+import org.apache.airavata.common.exception.AiravataException;
+import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
+import org.apache.airavata.gfac.core.SSHApiException;
+import org.apache.airavata.gfac.core.authentication.SSHKeyAuthentication;
+import org.apache.airavata.gfac.core.cluster.CommandInfo;
+import org.apache.airavata.gfac.core.cluster.CommandOutput;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+import org.apache.airavata.gfac.core.cluster.ServerInfo;
+import org.apache.airavata.gfac.core.JobManagerConfiguration;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * One Remote cluster instance for each compute resource.
+ */
+public class HPCRemoteCluster implements RemoteCluster{
+    private static final Logger log = LoggerFactory.getLogger(HPCRemoteCluster.class);
+	private final SSHKeyAuthentication authentication;
+	private final ServerInfo serverInfo;
+	private final JobManagerConfiguration jobManagerConfiguration;
+	private final JSch jSch;
+	private Session session;
+	private OutputParser outputParser;
+
+	public HPCRemoteCluster(ServerInfo serverInfo, JobManagerConfiguration jobManagerConfiguration, AuthenticationInfo
+			authenticationInfo, OutputParser outputParser) throws AiravataException {
+		try {
+			this.serverInfo = serverInfo;
+			this.jobManagerConfiguration = jobManagerConfiguration;
+			if (authenticationInfo instanceof SSHKeyAuthentication) {
+				authentication = (SSHKeyAuthentication) authenticationInfo;
+			} else {
+				throw new AiravataException("Only SSH key authentication is supported");
+			}
+			this.outputParser = outputParser;
+			jSch = new JSch();
+			jSch.addIdentity(authentication.getPrivateKeyFilePath(), authentication.getPublicKeyFilePath(), authentication
+					.getPassphrase().getBytes());
+			session = jSch.getSession(serverInfo.getUserName(), serverInfo.getHost(), serverInfo.getPort());
+			session.setUserInfo(new DefaultUserInfo(serverInfo.getUserName(), null, authentication.getPassphrase()));
+			session.connect(); // 0 connection timeout
+		} catch (JSchException e) {
+			throw new AiravataException("JSch initialization error ", e);
+		}
+	}
+
+	@Override
+	public String submitBatchJob(String jobScriptFilePath, String workingDirectory) throws SSHApiException {
+		scpTo(jobScriptFilePath, workingDirectory); // scp script file to working directory
+		RawCommandInfo submitCommand = jobManagerConfiguration.getSubmitCommand(workingDirectory, jobScriptFilePath);
+
+		StandardOutReader reader = new StandardOutReader();
+		executeCommand(submitCommand, reader);
+		throwExceptionOnError(reader, submitCommand);
+		return outputParser.parseJobSubmission(reader.getStdOutputString());
+	}
+
+	@Override
+	public void scpTo(String localFile, String remoteFile) throws SSHApiException {
+		int retry = 3;
+		while (retry > 0) {
+			try {
+				if (!session.isConnected()) {
+					session.connect();
+				}
+				log.info("Transferring localhost:" + localFile  + " to " + serverInfo.getHost() + ":" + remoteFile);
+				SSHUtils.scpTo(localFile, remoteFile, session);
+				retry = 0;
+			} catch (Exception e) {
+				retry--;
+				if (!session.isConnected()) {
+					try {
+						session.connect();
+					} catch (JSchException e1) {
+						throw new SSHApiException("JSch Session connection failed");
+					}
+				}
+				if (retry == 0) {
+					throw new SSHApiException("Failed to scp localhost:" + localFile + " to " + serverInfo.getHost() +
+							":" + remoteFile, e);
+				} else {
+					log.info("Retry transfer localhost:" + localFile + " to " + serverInfo.getHost() + ":" +
+							remoteFile);
+				}
+			}
+		}
+	}
+
+	@Override
+	public void scpFrom(String remoteFile, String localFile) throws SSHApiException {
+		int retry = 3;
+		while(retry>0) {
+			try {
+				if (!session.isConnected()) {
+					session.connect();
+				}
+				log.info("Transferring " + serverInfo.getHost() + ":" + remoteFile + " To localhost:" + localFile);
+				SSHUtils.scpFrom(remoteFile, localFile, session);
+				retry=0;
+			} catch (Exception e) {
+				retry--;
+				if (!session.isConnected()) {
+					try {
+						session.connect();
+					} catch (JSchException e1) {
+						throw new SSHApiException("JSch Session connection failed");
+					}
+				}
+				if (retry == 0) {
+					throw new SSHApiException("Failed to scp " + serverInfo.getHost() + ":" + remoteFile + " to " +
+							"localhost:" + localFile, e);
+				} else {
+					log.info("Retry transfer " + serverInfo.getHost() + ":" + remoteFile + "  to localhost:" + localFile);
+				}
+			}
+		}
+	}
+
+	@Override
+	public void scpThirdParty(String remoteFileSource, String remoteFileTarget) throws SSHApiException {
+		try {
+			if(!session.isConnected()){
+				session.connect();
+			}
+			log.info("Transferring from:" + remoteFileSource + " To: " + remoteFileTarget);
+			SSHUtils.scpThirdParty(remoteFileSource, remoteFileTarget, session);
+		} catch (IOException | JSchException e) {
+			throw new SSHApiException("Failed scp file:" + remoteFileSource + " to remote file "
+					+remoteFileTarget , e);
+		}
+	}
+
+	@Override
+	public void makeDirectory(String directoryPath) throws SSHApiException {
+		try {
+			if (!session.isConnected()) {
+				session.connect();
+			}
+			log.info("Creating directory: " + serverInfo.getHost() + ":" + directoryPath);
+			SSHUtils.makeDirectory(directoryPath, session);
+		} catch (JSchException | IOException e) {
+			throw new SSHApiException("Failed to create directory " + serverInfo.getHost() + ":" + directoryPath);
+		}
+	}
+
+	@Override
+	public boolean cancelJob(String jobId) throws SSHApiException {
+		RawCommandInfo cancelCommand = jobManagerConfiguration.getCancelCommand(jobId);
+		StandardOutReader reader = new StandardOutReader();
+		executeCommand(cancelCommand, reader);
+		throwExceptionOnError(reader, cancelCommand);
+		return true;
+	}
+
+	@Override
+	public JobStatus getJobStatus(String jobId) throws SSHApiException {
+		RawCommandInfo monitorCommand = jobManagerConfiguration.getMonitorCommand(jobId);
+		StandardOutReader reader = new StandardOutReader();
+		executeCommand(monitorCommand, reader);
+		throwExceptionOnError(reader, monitorCommand);
+		return outputParser.parseJobStatus(jobId, reader.getStdOutputString());
+	}
+
+	@Override
+	public String getJobIdByJobName(String jobName, String userName) throws SSHApiException {
+		RawCommandInfo jobIdMonitorCommand = jobManagerConfiguration.getJobIdMonitorCommand(jobName, userName);
+		StandardOutReader reader = new StandardOutReader();
+		executeCommand(jobIdMonitorCommand, reader);
+		throwExceptionOnError(reader, jobIdMonitorCommand);
+		return outputParser.parseJobId(jobName, reader.getStdOutputString());
+	}
+
+	@Override
+	public void getJobStatuses(String userName, Map<String, JobStatus> jobStatusMap) throws SSHApiException {
+		RawCommandInfo userBasedMonitorCommand = jobManagerConfiguration.getUserBasedMonitorCommand(userName);
+		StandardOutReader reader = new StandardOutReader();
+		executeCommand(userBasedMonitorCommand, reader);
+		throwExceptionOnError(reader, userBasedMonitorCommand);
+		outputParser.parseJobStatuses(userName, jobStatusMap, reader.getStdOutputString());
+	}
+
+	@Override
+	public List<String> listDirectory(String directoryPath) throws SSHApiException {
+		try {
+			if (!session.isConnected()) {
+				session.connect();
+			}
+			log.info("Listing directory: " + serverInfo.getHost() + ":" + directoryPath);
+			return SSHUtils.listDirectory(directoryPath, session);
+		} catch (JSchException | IOException e) {
+			throw new SSHApiException("Failed to list directory " + serverInfo.getHost() + ":" + directoryPath);
+		}
+	}
+
+	@Override
+	public Session getSession() throws SSHApiException {
+		return session;
+	}
+
+	@Override
+	public void disconnect() throws SSHApiException {
+		session.disconnect();
+	}
+
+	/**
+	 * Throws an SSHApiException if the standard error output indicates that the executed command failed;
+	 * otherwise the method simply returns.
+	 * @param reader - command output reader
+	 * @param submitCommand - command which was executed on the remote machine.
+	 */
+	private void throwExceptionOnError(StandardOutReader reader, RawCommandInfo submitCommand) throws SSHApiException{
+		String stdErrorString = reader.getStdErrorString();
+		String command = submitCommand.getCommand().substring(submitCommand.getCommand().lastIndexOf(File.separator)
+				+ 1);
+		if (stdErrorString == null) {
+			// nothing to do
+		} else if ((stdErrorString.contains(command.trim()) && !stdErrorString.contains("Warning")) || stdErrorString
+				.contains("error")) {
+			log.error("Command {} , Standard Error output {}", command, stdErrorString);
+			throw new SSHApiException("Error running command " + command + "  on remote cluster. StandardError: " +
+					stdErrorString);
+		}
+	}
+
+	private void executeCommand(CommandInfo commandInfo, CommandOutput commandOutput) throws SSHApiException {
+		String command = commandInfo.getCommand();
+		ChannelExec channelExec = null;
+		try {
+			if (!session.isConnected()) {
+				session.connect();
+			}
+			channelExec = ((ChannelExec) session.openChannel("exec"));
+			channelExec.setCommand(command);
+		    channelExec.setInputStream(null);
+			channelExec.setErrStream(commandOutput.getStandardError());
+			log.info("Executing command {}", commandInfo.getCommand());
+			channelExec.connect();
+			commandOutput.onOutput(channelExec);
+		} catch (JSchException e) {
+			throw new SSHApiException("Unable to execute command - ", e);
+		}finally {
+			//Only disconnecting the channel, session can be reused
+			if (channelExec != null) {
+				channelExec.disconnect();
+			}
+		}
+	}
+
+	@Override
+	public ServerInfo getServerInfo() {
+		return this.serverInfo;
+	}
+
+	private class DefaultUserInfo implements UserInfo {
+
+		private String userName;
+		private String password;
+		private String passphrase;
+
+		public DefaultUserInfo(String userName, String password, String passphrase) {
+			this.userName = userName;
+			this.password = password;
+			this.passphrase = passphrase;
+		}
+
+		@Override
+		public String getPassphrase() {
+			return null;
+		}
+
+		@Override
+		public String getPassword() {
+			return null;
+		}
+
+		@Override
+		public boolean promptPassword(String s) {
+			return false;
+		}
+
+		@Override
+		public boolean promptPassphrase(String s) {
+			return false;
+		}
+
+		@Override
+		public boolean promptYesNo(String s) {
+			return false;
+		}
+
+		@Override
+		public void showMessage(String s) {
+
+		}
+	}
+}

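HPCRemoteCluster wires together a JSch session, a JobManagerConfiguration and an OutputParser. A rough usage sketch, assuming ServerInfo still exposes the (userName, host, port) constructor seen in the removed code and that an SSHKeyAuthentication instance is obtained elsewhere (host, paths and account below are hypothetical):

    import org.apache.airavata.common.exception.AiravataException;
    import org.apache.airavata.gfac.core.SSHApiException;
    import org.apache.airavata.gfac.core.authentication.SSHKeyAuthentication;
    import org.apache.airavata.gfac.core.cluster.RemoteCluster;
    import org.apache.airavata.gfac.core.cluster.ServerInfo;
    import org.apache.airavata.gfac.impl.Factory;
    import org.apache.airavata.gfac.impl.HPCRemoteCluster;
    import org.apache.airavata.gfac.impl.job.PBSOutputParser;
    import org.apache.airavata.model.status.JobStatus;

    public class HPCRemoteClusterSketch {
        public static void run(SSHKeyAuthentication auth) throws AiravataException, SSHApiException {
            ServerInfo serverInfo = new ServerInfo("testuser", "hpc.example.org", 22); // hypothetical host
            RemoteCluster cluster = new HPCRemoteCluster(serverInfo,
                    Factory.getPBSJobManager("/opt/torque/bin"), auth, new PBSOutputParser());
            // submitBatchJob scps the script into the working directory and runs the submit command
            String jobId = cluster.submitBatchJob("/tmp/job.pbs", "/home/testuser/workdir");
            JobStatus status = cluster.getJobStatus(jobId);
            System.out.println("Submitted " + jobId + ", current status: " + status);
            cluster.disconnect();
        }
    }
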
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/SSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/SSHUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/SSHUtils.java
index eef6cf3..6a100a9 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/SSHUtils.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/SSHUtils.java
@@ -25,7 +25,6 @@ import com.jcraft.jsch.ChannelExec;
 import com.jcraft.jsch.JSchException;
 import com.jcraft.jsch.Session;
 import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.gsi.ssh.impl.StandardOutReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/StandardOutReader.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/StandardOutReader.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/StandardOutReader.java
new file mode 100644
index 0000000..e34858b
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/StandardOutReader.java
@@ -0,0 +1,79 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl;
+
+import com.jcraft.jsch.Channel;
+
+import org.apache.airavata.gfac.core.cluster.CommandOutput;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+public class StandardOutReader implements CommandOutput {
+
+    private static final Logger logger = LoggerFactory.getLogger(StandardOutReader.class);
+    String stdOutputString = null;
+    ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
+    public void onOutput(Channel channel) {
+        try {
+            StringBuffer pbsOutput = new StringBuffer("");
+            InputStream inputStream =  channel.getInputStream();
+            byte[] tmp = new byte[1024];
+            do {
+                while (inputStream.available() > 0) {
+                    int i = inputStream.read(tmp, 0, 1024);
+                    if (i < 0) break;
+                    pbsOutput.append(new String(tmp, 0, i));
+                }
+            } while (!channel.isClosed()) ;
+            String output = pbsOutput.toString();
+            this.setStdOutputString(output);
+        } catch (IOException e) {
+            logger.error(e.getMessage(), e);
+        }
+
+    }
+
+
+    public void exitCode(int code) {
+        logger.info("Program exit code - " + code);
+    }
+
+    public String getStdOutputString() {
+        return stdOutputString;
+    }
+
+    public void setStdOutputString(String stdOutputString) {
+        this.stdOutputString = stdOutputString;
+    }
+
+    public String getStdErrorString() {
+        return errorStream.toString();
+    }
+
+    public OutputStream getStandardError() {
+        return errorStream;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/LSFJobConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/LSFJobConfiguration.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/LSFJobConfiguration.java
new file mode 100644
index 0000000..b84b8ff
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/LSFJobConfiguration.java
@@ -0,0 +1,123 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl.job;
+
+import org.apache.airavata.gfac.core.JobManagerConfiguration;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
+import org.apache.commons.io.FilenameUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+
+public class LSFJobConfiguration implements JobManagerConfiguration {
+    private final static Logger logger = LoggerFactory.getLogger(LSFJobConfiguration.class);
+
+    private String jobDescriptionTemplateName;
+
+    private String scriptExtension;
+
+    private String installedPath;
+
+    private OutputParser parser;
+
+    public LSFJobConfiguration(){
+        // no-arg constructor; use the setter methods to set all the parameters in order
+    }
+    public LSFJobConfiguration(String jobDescriptionTemplateName,
+                                 String scriptExtension,String installedPath,OutputParser parser) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+        this.scriptExtension = scriptExtension;
+        this.parser = parser;
+        if (installedPath.endsWith("/") || installedPath.isEmpty()) {
+            this.installedPath = installedPath;
+        } else {
+            this.installedPath = installedPath + "/";
+        }
+    }
+
+    @Override
+    public RawCommandInfo getCancelCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "bkill " + jobID);
+    }
+
+    @Override
+    public String getJobDescriptionTemplateName() {
+        return jobDescriptionTemplateName;
+    }
+
+    @Override
+    public RawCommandInfo getMonitorCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "bjobs " + jobID);
+    }
+
+    @Override
+    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+        return new RawCommandInfo(this.installedPath + "bjobs -u " + userName);
+    }
+
+    @Override
+    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+        return new RawCommandInfo(this.installedPath + "bjobs -J " + jobName);
+    }
+
+    @Override
+    public String getScriptExtension() {
+        return scriptExtension;
+    }
+
+    @Override
+    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
+        return new RawCommandInfo(this.installedPath + "bsub < " +
+                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
+    }
+
+    @Override
+    public OutputParser getParser() {
+        return parser;
+    }
+
+    public void setParser(OutputParser parser) {
+        this.parser = parser;
+    }
+
+    @Override
+    public String getInstalledPath() {
+        return installedPath;
+    }
+
+
+    @Override
+    public String getBaseCancelCommand() {
+        return "bkill";
+    }
+
+    @Override
+    public String getBaseMonitorCommand() {
+        return "bjobs";
+    }
+
+    @Override
+    public String getBaseSubmitCommand() {
+        return "bsub";
+    }
+}

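One detail worth calling out in LSFJobConfiguration: unlike the qsub/sbatch style used by the PBS and SLURM configurations later in this commit, LSF's submit command feeds the script through stdin (bsub < script). A small example of the command strings this class builds, assuming RawCommandInfo.getCommand() returns the raw string it was constructed with (paths and job id are hypothetical):

    import org.apache.airavata.gfac.impl.job.LSFJobConfiguration;
    import org.apache.airavata.gfac.impl.job.LSFOutputParser;

    public class LSFCommandSketch {
        public static void main(String[] args) {
            LSFJobConfiguration lsf = new LSFJobConfiguration("LSFTemplate.xslt", ".lsf",
                    "/opt/lsf/bin", new LSFOutputParser());
            // expected: /opt/lsf/bin/bsub < /home/testuser/workdir/job.lsf
            System.out.println(lsf.getSubmitCommand("/home/testuser/workdir", "/tmp/job.lsf").getCommand());
            // expected: /opt/lsf/bin/bjobs 2477982
            System.out.println(lsf.getMonitorCommand("2477982").getCommand());
            // expected: /opt/lsf/bin/bkill 2477982
            System.out.println(lsf.getCancelCommand("2477982").getCommand());
        }
    }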

[3/7] airavata git commit: Removed gsi related code

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/LSFOutputParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/LSFOutputParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/LSFOutputParser.java
new file mode 100644
index 0000000..b16aa9b
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/LSFOutputParser.java
@@ -0,0 +1,133 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl.job;
+
+import org.apache.airavata.gfac.core.JobDescriptor;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.SSHApiException;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class LSFOutputParser implements OutputParser {
+    private final static Logger logger = LoggerFactory.getLogger(LSFOutputParser.class);
+
+    @Override
+    public void parseSingleJob(JobDescriptor jobDescriptor, String rawOutput) throws SSHApiException {
+        logger.debug(rawOutput);
+        // TODO: implement this. It is not used by the Airavata runtime, but it would be useful
+        // for getting descriptive information about a single job when gsissh is used as a standalone tool.
+    }
+
+    @Override
+    public String parseJobSubmission(String rawOutput) throws SSHApiException {
+        logger.debug(rawOutput);
+        return rawOutput.substring(rawOutput.indexOf("<")+1,rawOutput.indexOf(">"));
+    }
+
+    @Override
+    public JobStatus parseJobStatus(String jobID, String rawOutput) throws SSHApiException {
+        boolean jobFound = false;
+        logger.debug(rawOutput);
+        //todo this is not used anymore
+        return null;
+    }
+
+    @Override
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws SSHApiException {
+        logger.debug(rawOutput);
+
+        String[] info = rawOutput.split("\n");
+//        int lastStop = 0;
+        for (String jobID : statusMap.keySet()) {
+            String jobName = jobID.split(",")[1];
+            boolean found = false;
+            for (int i = 0; i < info.length; i++) {
+                if (info[i].contains(jobName.substring(0,8))) {
+                    // now starts processing this line
+                    logger.info(info[i]);
+                    String correctLine = info[i];
+                    String[] columns = correctLine.split(" ");
+                    List<String> columnList = new ArrayList<String>();
+                    for (String s : columns) {
+                        if (!"".equals(s)) {
+                            columnList.add(s);
+                        }
+                    }
+//                    lastStop = i + 1;
+                    try {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf(columnList.get(2))));
+                    }catch(IndexOutOfBoundsException e) {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf("U")));
+                    }
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                logger.error("Couldn't find the status of the job with JobName: " + jobName + ", JobId: " + jobID.split(",")[0]);
+            }
+        }
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws SSHApiException {
+        String regJobId = "jobId";
+        Pattern pattern = Pattern.compile("(?=(?<" + regJobId + ">\\d+)\\s+\\w+\\s+" + jobName + ")"); // regex - look ahead and match
+        if (rawOutput != null) {
+            Matcher matcher = pattern.matcher(rawOutput);
+            if (matcher.find()) {
+                return matcher.group(regJobId);
+            } else {
+                logger.error("No match is found for JobName");
+                return null;
+            }
+        } else {
+            logger.error("Error: RawOutput shouldn't be null");
+            return null;
+        }
+    }
+
+    public static void main(String[] args) {
+        String test = "Job <2477982> is submitted to queue <short>.";
+        System.out.println(test.substring(test.indexOf("<")+1, test.indexOf(">")));
+        String test1 = "JOBID   USER    STAT  QUEUE      FROM_HOST   EXEC_HOST   JOB_NAME   SUBMIT_TIME\n" +
+                "2636607 lg11w   RUN   long       ghpcc06     c11b02      *069656647 Mar  7 00:58\n" +
+                "2636582 lg11w   RUN   long       ghpcc06     c02b01      2134490944 Mar  7 00:48";
+        Map<String, JobStatus> statusMap = new HashMap<String, JobStatus>();
+        statusMap.put("2477983,2134490944", new JobStatus(JobState.UNKNOWN));
+        LSFOutputParser lsfOutputParser = new LSFOutputParser();
+        try {
+            lsfOutputParser.parseJobStatuses("cjh", statusMap, test1);
+        } catch (SSHApiException e) {
+            logger.error(e.getMessage(), e);
+        }
+        System.out.println(statusMap.get("2477983,2134490944"));
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/PBSJobConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/PBSJobConfiguration.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/PBSJobConfiguration.java
new file mode 100644
index 0000000..d709514
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/PBSJobConfiguration.java
@@ -0,0 +1,121 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl.job;
+
+import org.apache.airavata.gfac.core.JobManagerConfiguration;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+
+public class PBSJobConfiguration implements JobManagerConfiguration {
+
+    private String jobDescriptionTemplateName;
+
+    private String scriptExtension;
+
+    private String installedPath;
+
+    private OutputParser parser;
+
+    public PBSJobConfiguration() {
+        // no-arg constructor; use the setter methods to set all the parameters in order
+    }
+
+    public PBSJobConfiguration(String jobDescriptionTemplateName,
+                               String scriptExtension, String installedPath, OutputParser parser) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+        this.scriptExtension = scriptExtension;
+        this.parser = parser;
+        if (installedPath.endsWith("/")) {
+            this.installedPath = installedPath;
+        } else {
+            this.installedPath = installedPath + "/";
+        }
+    }
+
+    public RawCommandInfo getCancelCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "qdel " + jobID);
+    }
+
+    public String getJobDescriptionTemplateName() {
+        return jobDescriptionTemplateName;
+    }
+
+    public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+    }
+
+    public RawCommandInfo getMonitorCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "qstat -f " + jobID);
+    }
+
+    public String getScriptExtension() {
+        return scriptExtension;
+    }
+
+    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
+        return new RawCommandInfo(this.installedPath + "qsub " +
+                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
+    }
+
+    public String getInstalledPath() {
+        return installedPath;
+    }
+
+    public void setInstalledPath(String installedPath) {
+        this.installedPath = installedPath;
+    }
+
+    public OutputParser getParser() {
+        return parser;
+    }
+
+    public void setParser(OutputParser parser) {
+        this.parser = parser;
+    }
+
+    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+        return new RawCommandInfo(this.installedPath + "qstat -u " + userName);
+    }
+
+    @Override
+    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+        // For PBS there is no option to get jobDetails by JobName, so we search with userName
+        return new RawCommandInfo(this.installedPath + "qstat -u " + userName + " -f  | grep \"Job_Name = " + jobName + "\" -B1");
+    }
+
+    @Override
+    public String  getBaseCancelCommand() {
+        return "qdel";
+    }
+
+    @Override
+    public String  getBaseMonitorCommand() {
+        return "qstat";
+    }
+
+    @Override
+    public String getBaseSubmitCommand() {
+        return "qsub ";
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/PBSOutputParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/PBSOutputParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/PBSOutputParser.java
new file mode 100644
index 0000000..38d98f9
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/PBSOutputParser.java
@@ -0,0 +1,219 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl.job;
+
+import org.apache.airavata.gfac.core.JobDescriptor;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.SSHApiException;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class PBSOutputParser implements OutputParser {
+    private static final Logger log = LoggerFactory.getLogger(PBSOutputParser.class);
+
+    public void parseSingleJob(JobDescriptor jobDescriptor, String rawOutput) {
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+        String[] line;
+        for (int i = 0; i < info.length; i++) {
+            if (info[i].contains("=")) {
+                line = info[i].split("=", 2);
+            } else {
+                line = info[i].split(":", 2);
+            }
+            if (line.length >= 2) {
+                String header = line[0].trim();
+                log.debug("Header = " + header);
+                String value = line[1].trim();
+                log.debug("value = " + value);
+
+                if (header.equals("Variable_List")) {
+                    while (info[i + 1].startsWith("\t")) {
+                        value += info[i + 1];
+                        i++;
+                    }
+                    value = value.replaceAll("\t", "");
+                    jobDescriptor.setVariableList(value);
+                } else if ("Job Id".equals(header)) {
+                    jobDescriptor.setJobID(value);
+                } else if ("Job_Name".equals(header)) {
+                    jobDescriptor.setJobName(value);
+                } else if ("Account_Name".equals(header)) {
+                    jobDescriptor.setAcountString(value);
+                } else if ("job_state".equals(header)) {
+                    jobDescriptor.setStatus(value);
+                } else if ("Job_Owner".equals(header)) {
+                    jobDescriptor.setOwner(value);
+                } else if ("resources_used.cput".equals(header)) {
+                    jobDescriptor.setUsedCPUTime(value);
+                } else if ("resources_used.mem".equals(header)) {
+                    jobDescriptor.setUsedMemory(value);
+                } else if ("resources_used.walltime".equals(header)) {
+                    jobDescriptor.setEllapsedTime(value);
+                } else if ("queue".equals(header))
+                    jobDescriptor.setQueueName(value);
+                else if ("ctime".equals(header)) {
+                    jobDescriptor.setCTime(value);
+                } else if ("qtime".equals(header)) {
+                    jobDescriptor.setQTime(value);
+                } else if ("mtime".equals(header)) {
+                    jobDescriptor.setMTime(value);
+                } else if ("start_time".equals(header)) {
+                    jobDescriptor.setSTime(value);
+                } else if ("comp_time".equals(header)) {
+                    jobDescriptor.setCompTime(value);
+                } else if ("exec_host".equals(header)) {
+                    jobDescriptor.setExecuteNode(value);
+                } else if ("Output_Path".equals(header)) {
+                    if (info[i + 1].contains("=") || info[i + 1].contains(":"))
+                        jobDescriptor.setStandardOutFile(value);
+                    else {
+                        jobDescriptor.setStandardOutFile(value + info[i + 1].trim());
+                        i++;
+                    }
+                } else if ("Error_Path".equals(header)) {
+                    if (info[i + 1].contains("=") || info[i + 1].contains(":"))
+                        jobDescriptor.setStandardErrorFile(value);
+                    else {
+                        String st = info[i + 1].trim();
+                        jobDescriptor.setStandardErrorFile(value + st);
+                        i++;
+                    }
+
+                } else if ("submit_args".equals(header)) {
+                    while (i + 1 < info.length) {
+                        if (info[i + 1].startsWith("\t")) {
+                            value += info[i + 1];
+                            i++;
+                        } else
+                            break;
+                    }
+                    value = value.replaceAll("\t", "");
+                    jobDescriptor.setSubmitArgs(value);
+                }
+            }
+        }
+    }
+
+    public String parseJobSubmission(String rawOutput) {
+        log.debug(rawOutput);
+        return rawOutput;  // In PBS the submission stdout is directly the jobID
+    }
+
+    public JobStatus parseJobStatus(String jobID, String rawOutput) {
+        boolean jobFound = false;
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+        String[] line = null;
+        int index = 0;
+        for (String anInfo : info) {
+            index++;
+            if (anInfo.contains("Job Id:")) {
+                if (anInfo.contains(jobID)) {
+                    jobFound = true;
+                    break;
+                }
+            }
+        }
+        if (jobFound) {
+            for (int i=index;i<info.length;i++) {
+                String anInfo = info[i];
+                if (anInfo.contains("=")) {
+                    line = anInfo.split("=", 2);
+                    if (line.length != 0) {
+                        if (line[0].contains("job_state")) {
+	                        return new JobStatus(JobState.valueOf(line[1].replaceAll(" ", "")));
+                        }
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) {
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+//        int lastStop = 0;
+        for (String jobID : statusMap.keySet()) {
+            String jobName = jobID.split(",")[1];
+            boolean found = false;
+            for (int i = 0; i < info.length; i++) {
+                if (info[i].contains(jobName.substring(0,8))) {
+                    // now starts processing this line
+                    log.info(info[i]);
+                    String correctLine = info[i];
+                    String[] columns = correctLine.split(" ");
+                    List<String> columnList = new ArrayList<String>();
+                    for (String s : columns) {
+                        if (!"".equals(s)) {
+                            columnList.add(s);
+                        }
+                    }
+//                    lastStop = i + 1;
+                    try {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf(columnList.get(9))));
+                    }catch(IndexOutOfBoundsException e) {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf("U")));
+                    }
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                log.error("Couldn't find the status of the job with JobName: " + jobName + ", JobId: " + jobID.split(",")[0]);
+            }
+        }
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws SSHApiException {
+        /* output will look like
+        Job Id: 2080802.gordon-fe2.local
+            Job_Name = A312402627
+        */
+        String regJobId = "jobId";
+        Pattern pattern = Pattern.compile("(?<" + regJobId + ">[^\\s]*)\\s*.* " + jobName);
+        if (rawOutput != null) {
+            Matcher matcher = pattern.matcher(rawOutput);
+            if (matcher.find()) {
+                return matcher.group(regJobId);
+            } else {
+                log.error("No match is found for JobName");
+                return null;
+            }
+        } else {
+            log.error("Error: RawOutput shouldn't be null");
+            return null;
+        }
+    }
+
+
+}

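Because PBS cannot look a job up by name, getJobIdMonitorCommand in PBSJobConfiguration greps the per-user qstat -f output, and PBSOutputParser.parseJobId then extracts the id from the two grep'd lines. A small sketch tying the two together, reusing the sample output from the parser's own comment (user name and install path are hypothetical, and it assumes RawCommandInfo.getCommand() returns the raw string it was constructed with):

    import org.apache.airavata.gfac.core.SSHApiException;
    import org.apache.airavata.gfac.impl.job.PBSJobConfiguration;
    import org.apache.airavata.gfac.impl.job.PBSOutputParser;

    public class PBSJobIdLookupSketch {
        public static void main(String[] args) throws SSHApiException {
            PBSJobConfiguration pbs = new PBSJobConfiguration("PBSTemplate.xslt", ".pbs",
                    "/opt/torque/bin", new PBSOutputParser());
            // expected: /opt/torque/bin/qstat -u testuser -f  | grep "Job_Name = A312402627" -B1
            System.out.println(pbs.getJobIdMonitorCommand("A312402627", "testuser").getCommand());

            // Feeding the grep'd output back through the parser should yield the full PBS job id.
            String grepOutput = "Job Id: 2080802.gordon-fe2.local\n    Job_Name = A312402627";
            // expected: 2080802.gordon-fe2.local
            System.out.println(new PBSOutputParser().parseJobId("A312402627", grepOutput));
        }
    }
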
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/SlurmJobConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/SlurmJobConfiguration.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/SlurmJobConfiguration.java
new file mode 100644
index 0000000..354db8a
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/SlurmJobConfiguration.java
@@ -0,0 +1,119 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl.job;
+
+import org.apache.airavata.gfac.core.JobManagerConfiguration;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+
+public class SlurmJobConfiguration implements JobManagerConfiguration {
+
+    private String jobDescriptionTemplateName;
+
+    private String scriptExtension;
+
+    private String installedPath;
+
+    private OutputParser parser;
+
+    public SlurmJobConfiguration() {
+        // no-arg constructor; configure the instance through the setters
+    }
+
+    public SlurmJobConfiguration(String jobDescriptionTemplateName,
+                                 String scriptExtension, String installedPath, OutputParser parser) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+        this.scriptExtension = scriptExtension;
+        this.parser = parser;
+        if (installedPath.endsWith("/")) {
+            this.installedPath = installedPath;
+        } else {
+            this.installedPath = installedPath + "/";
+        }
+    }
+
+    public RawCommandInfo getCancelCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "scancel " + jobID);
+    }
+
+    public String getJobDescriptionTemplateName() {
+        return jobDescriptionTemplateName;
+    }
+
+    public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+    }
+
+    public RawCommandInfo getMonitorCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "squeue -j " + jobID);
+    }
+
+    public String getScriptExtension() {
+        return scriptExtension;
+    }
+
+    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
+        return new RawCommandInfo(this.installedPath + "sbatch " +
+                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
+    }
+
+    public String getInstalledPath() {
+        return installedPath;
+    }
+
+    public void setInstalledPath(String installedPath) {
+        this.installedPath = installedPath;
+    }
+
+    public OutputParser getParser() {
+        return parser;
+    }
+
+    public void setParser(OutputParser parser) {
+        this.parser = parser;
+    }
+
+    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+        return new RawCommandInfo(this.installedPath + "squeue -u " + userName);
+    }
+
+    @Override
+    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+        return new RawCommandInfo(this.installedPath + "squeue -n " + jobName + " -u " + userName);
+    }
+
+    @Override
+    public String getBaseCancelCommand() {
+        return "scancel";
+    }
+
+    @Override
+    public String getBaseMonitorCommand() {
+        return "squeue";
+    }
+
+    @Override
+    public String getBaseSubmitCommand() {
+        return "sbatch";
+    }
+}
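As a rough usage sketch (not part of the commit), this is how the configuration above is meant to be wired together. The template name, install path and job id are illustrative values only; SlurmOutputParser is the parser added later in this patch.

    import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
    import org.apache.airavata.gfac.impl.job.SlurmJobConfiguration;
    import org.apache.airavata.gfac.impl.job.SlurmOutputParser;

    public class SlurmConfigurationSketch {
        public static void main(String[] args) {
            SlurmJobConfiguration config = new SlurmJobConfiguration(
                    "SLURMTemplate.xslt",     // job description template name (illustrative)
                    ".slurm",                 // script extension
                    "/usr/bin/",              // directory holding sbatch/squeue/scancel
                    new SlurmOutputParser());

            // Each getter assembles the full remote command line, e.g.
            // "/usr/bin/squeue -j 123456" and "/usr/bin/scancel 123456".
            RawCommandInfo monitor = config.getMonitorCommand("123456");
            RawCommandInfo cancel = config.getCancelCommand("123456");
        }
    }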

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/SlurmOutputParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/SlurmOutputParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/SlurmOutputParser.java
new file mode 100644
index 0000000..1974843
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/SlurmOutputParser.java
@@ -0,0 +1,193 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl.job;
+
+import org.apache.airavata.gfac.core.JobDescriptor;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.SSHApiException;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class SlurmOutputParser implements OutputParser {
+    private static final Logger log = LoggerFactory.getLogger(SlurmOutputParser.class);
+    public static final int JOB_NAME_OUTPUT_LENGTH = 8;
+    public static final String STATUS = "status";
+
+    public void parseSingleJob(JobDescriptor descriptor, String rawOutput) throws SSHApiException {
+        log.info(rawOutput);
+        String[] info = rawOutput.split("\n");
+        String lastString = info[info.length - 1];
+        if (lastString.contains("JOB ID")) {
+            // because there's no state
+            descriptor.setStatus("U");
+        } else {
+            int column = 0;
+            log.debug(lastString);
+            for (String each : lastString.split(" ")) {
+                if (each.trim().isEmpty()) {
+                    continue;
+                } else {
+                    switch (column) {
+                        case 0:
+                            descriptor.setJobID(each);
+                            column++;
+                            break;
+                        case 1:
+                            descriptor.setPartition(each);
+                            column++;
+                            break;
+                        case 2:
+                            descriptor.setJobName(each);
+                            column++;
+                            break;
+                        case 3:
+                            descriptor.setUserName(each);
+                            column++;
+                            break;
+                        case 4:
+                            descriptor.setStatus(each);
+                            column++;
+                            break;
+                        case 5:
+                            descriptor.setUsedCPUTime(each);
+                            column++;
+                            break;
+                        case 6:
+                            try {
+                                int nodes = Integer.parseInt(each);
+                                descriptor.setNodes(nodes);
+                            }catch (Exception e){
+                                log.error("Node count read from command output is not an integer !!!");
+                            }
+                            column++;
+                            break;
+                        case 7:
+                            descriptor.setNodeList(each);
+                            column++;
+                            break;
+                    }
+                }
+            }
+        }
+
+    }
+
+    /**
+     * This can be used to parse the output of sbatch and extract the jobID from the content.
+     *
+     * @param rawOutput
+     * @return
+     */
+    public String parseJobSubmission(String rawOutput) throws SSHApiException {
+        // FIXME : use regex to match correct jobId;
+        log.info(rawOutput);
+        String[] info = rawOutput.split("\n");
+        for (String anInfo : info) {
+            if (anInfo.contains("Submitted batch job")) {
+                String[] split = anInfo.split("Submitted batch job");
+                return split[1].trim();
+            }
+        }
+        return "";
+//        throw new SSHApiException(rawOutput);  // TODO: decide whether an empty result should be treated as an error
+    }
+
+    public JobStatus parseJobStatus(String jobID, String rawOutput) throws SSHApiException {
+        log.info(rawOutput);
+        Pattern pattern = Pattern.compile(jobID + "(?=\\s+\\S+\\s+\\S+\\s+\\S+\\s+(?<" + STATUS + ">\\w+))");
+        Matcher matcher = pattern.matcher(rawOutput);
+        if (matcher.find()) {
+	        return new JobStatus(JobState.valueOf(matcher.group(STATUS)));
+        }
+        return null;
+    }
+
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws SSHApiException {
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+        String lastString = info[info.length - 1];
+        if (lastString.contains("JOBID") || lastString.contains("PARTITION")) {
+            log.info("There are no jobs with this username ... ");
+            return;
+        }
+//        int lastStop = 0;
+        for (String jobID : statusMap.keySet()) {
+            String jobId = jobID.split(",")[0];
+            String jobName = jobID.split(",")[1];
+            boolean found = false;
+            for (int i = 0; i < info.length; i++) {
+                if (info[i].contains(jobName.substring(0, 8))) {
+                    // now starts processing this line
+                    log.info(info[i]);
+                    String correctLine = info[i];
+                    String[] columns = correctLine.split(" ");
+                    List<String> columnList = new ArrayList<String>();
+                    for (String s : columns) {
+                        if (!"".equals(s)) {
+                            columnList.add(s);
+                        }
+                    }
+                    try {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf(columnList.get(4))));
+                    } catch (IndexOutOfBoundsException e) {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf("U")));
+                    }
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                log.error("Couldn't find the status of the Job with JobName: " + jobName + "Job Id: " + jobId);
+            }
+        }
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws SSHApiException {
+        String regJobId = "jobId";
+        if (jobName == null) {
+            return null;
+        } else if(jobName.length() > JOB_NAME_OUTPUT_LENGTH) {
+            jobName = jobName.substring(0, JOB_NAME_OUTPUT_LENGTH);
+        }
+        Pattern pattern = Pattern.compile("(?=(?<" + regJobId + ">\\d+)\\s+\\w+\\s+" + jobName + ")"); // regex - look ahead and match
+        if (rawOutput != null) {
+            Matcher matcher = pattern.matcher(rawOutput);
+            if (matcher.find()) {
+                return matcher.group(regJobId);
+            } else {
+                log.error("No match is found for JobName");
+                return null;
+            }
+        } else {
+            log.error("Error: RawOutput shouldn't be null");
+            return null;
+        }
+    }
+}
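A small standalone sketch of the two extraction steps above, assuming typical "Submitted batch job" and squeue output; the job id, partition and node names are invented for illustration.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class SlurmParsingSketch {
        public static void main(String[] args) {
            // 1) sbatch acknowledgement -> job id (as in parseJobSubmission)
            String sbatchOutput = "Submitted batch job 123456";
            for (String line : sbatchOutput.split("\n")) {
                if (line.contains("Submitted batch job")) {
                    System.out.println(line.split("Submitted batch job")[1].trim());  // 123456
                }
            }

            // 2) squeue listing -> state column, using the same look-ahead pattern
            //    as parseJobStatus (JOBID PARTITION NAME USER ST ...)
            String squeueLine = "123456 shared myjob airavata R 0:42 1 node0001";
            Pattern p = Pattern.compile("123456(?=\\s+\\S+\\s+\\S+\\s+\\S+\\s+(?<status>\\w+))");
            Matcher m = p.matcher(squeueLine);
            if (m.find()) {
                System.out.println(m.group("status"));  // R
            }
        }
    }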

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/UGEJobConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/UGEJobConfiguration.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/UGEJobConfiguration.java
new file mode 100644
index 0000000..f9c60cb
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/UGEJobConfiguration.java
@@ -0,0 +1,121 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl.job;
+
+import org.apache.airavata.gfac.core.JobManagerConfiguration;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+
+public class UGEJobConfiguration implements JobManagerConfiguration {
+
+    private String jobDescriptionTemplateName;
+
+    private String scriptExtension;
+
+    private String installedPath;
+
+    private OutputParser parser;
+
+    public UGEJobConfiguration() {
+        // no-arg constructor; configure the instance through the setters
+    }
+
+    public UGEJobConfiguration(String jobDescriptionTemplateName,
+                               String scriptExtension, String installedPath, OutputParser parser) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+        this.scriptExtension = scriptExtension;
+        this.parser = parser;
+        if (installedPath.endsWith("/")) {
+            this.installedPath = installedPath;
+        } else {
+            this.installedPath = installedPath + "/";
+        }
+    }
+
+    public RawCommandInfo getCancelCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "qdel " + jobID);
+    }
+
+    public String getJobDescriptionTemplateName() {
+        return jobDescriptionTemplateName;
+    }
+
+    public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+    }
+
+    public RawCommandInfo getMonitorCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "qstat -j " + jobID);
+    }
+
+    public String getScriptExtension() {
+        return scriptExtension;
+    }
+
+    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
+        return new RawCommandInfo(this.installedPath + "qsub " +
+                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
+    }
+
+    public String getInstalledPath() {
+        return installedPath;
+    }
+
+    public void setInstalledPath(String installedPath) {
+        this.installedPath = installedPath;
+    }
+
+    public OutputParser getParser() {
+        return parser;
+    }
+
+    public void setParser(OutputParser parser) {
+        this.parser = parser;
+    }
+
+    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+        return new RawCommandInfo(this.installedPath + "qstat -u " + userName);
+    }
+
+    @Override
+    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+        // Job details are looked up per user here (qstat -u); the output parser then matches the job by name
+        return new RawCommandInfo(this.installedPath + "qstat -u " + userName);
+    }
+
+    @Override
+    public String  getBaseCancelCommand() {
+        return "qdel";
+    }
+
+    @Override
+    public String  getBaseMonitorCommand() {
+        return "qstat";
+    }
+
+    @Override
+    public String getBaseSubmitCommand() {
+        return "qsub ";
+    }
+}
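A hypothetical sketch of the user-based lookup this configuration relies on, combined with the UGEOutputParser added below; the install path, user name, template name and qstat listing line are assumptions for illustration.

    import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
    import org.apache.airavata.gfac.impl.job.UGEJobConfiguration;
    import org.apache.airavata.gfac.impl.job.UGEOutputParser;

    public class UGEJobIdLookupSketch {
        public static void main(String[] args) throws Exception {
            UGEJobConfiguration config = new UGEJobConfiguration(
                    "UGETemplate.xslt", ".sh", "/opt/sge/bin/", new UGEOutputParser());

            // Builds "/opt/sge/bin/qstat -u airavata"; the jobs are listed per user,
            // and the job name argument is not used to build this command.
            RawCommandInfo listJobs = config.getJobIdMonitorCommand("A312402627", "airavata");

            // Pretend this single line came back from the remote qstat -u call;
            // parseJobId matches on the (truncated) job name and returns the id column.
            String rawOutput = "2938 0.50000 A312402627 airavata r 06/16/2015 10:00:00 all.q@node1 1";
            String jobId = new UGEOutputParser().parseJobId("A312402627", rawOutput);
            System.out.println(jobId);  // 2938
        }
    }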

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/UGEOutputParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/UGEOutputParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/UGEOutputParser.java
new file mode 100644
index 0000000..0ece2d9
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/job/UGEOutputParser.java
@@ -0,0 +1,191 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.impl.job;
+
+import org.apache.airavata.gfac.core.JobDescriptor;
+import org.apache.airavata.gfac.core.cluster.OutputParser;
+import org.apache.airavata.gfac.core.SSHApiException;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class UGEOutputParser implements OutputParser {
+    private static final Logger log = LoggerFactory.getLogger(UGEOutputParser.class);
+    public static final String JOB_ID = "jobId";
+
+    public void parseSingleJob(JobDescriptor jobDescriptor, String rawOutput) {
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+        String[] line;
+        for (int i = 0; i < info.length; i++) {
+            if (info[i].contains("=")) {
+                line = info[i].split("=", 2);
+            } else {
+                line = info[i].split(":", 2);
+            }
+            if (line.length >= 2) {
+                String header = line[0].trim();
+                log.debug("Header = " + header);
+                String value = line[1].trim();
+                log.debug("value = " + value);
+
+                if (header.equals("Variable_List")) {
+                    while (info[i + 1].startsWith("\t")) {
+                        value += info[i + 1];
+                        i++;
+                    }
+                    value = value.replaceAll("\t", "");
+                    jobDescriptor.setVariableList(value);
+                } else if ("Job Id".equals(header)) {
+                    jobDescriptor.setJobID(value);
+                } else if ("Job_Name".equals(header)) {
+                    jobDescriptor.setJobName(value);
+                } else if ("Account_Name".equals(header)) {
+                    jobDescriptor.setAcountString(value);
+                } else if ("job_state".equals(header)) {
+                    jobDescriptor.setStatus(value);
+                } else if ("Job_Owner".equals(header)) {
+                    jobDescriptor.setOwner(value);
+                } else if ("resources_used.cput".equals(header)) {
+                    jobDescriptor.setUsedCPUTime(value);
+                } else if ("resources_used.mem".equals(header)) {
+                    jobDescriptor.setUsedMemory(value);
+                } else if ("resources_used.walltime".equals(header)) {
+                    jobDescriptor.setEllapsedTime(value);
+                } else if ("job_state".equals(header)) {
+                    jobDescriptor.setStatus(value);
+                } else if ("queue".equals(header))
+                    jobDescriptor.setQueueName(value);
+                else if ("ctime".equals(header)) {
+                    jobDescriptor.setCTime(value);
+                } else if ("qtime".equals(header)) {
+                    jobDescriptor.setQTime(value);
+                } else if ("mtime".equals(header)) {
+                    jobDescriptor.setMTime(value);
+                } else if ("start_time".equals(header)) {
+                    jobDescriptor.setSTime(value);
+                } else if ("comp_time".equals(header)) {
+                    jobDescriptor.setCompTime(value);
+                } else if ("exec_host".equals(header)) {
+                    jobDescriptor.setExecuteNode(value);
+                } else if ("Output_Path".equals(header)) {
+                    if (info[i + 1].contains("=") || info[i + 1].contains(":"))
+                        jobDescriptor.setStandardOutFile(value);
+                    else {
+                        jobDescriptor.setStandardOutFile(value + info[i + 1].trim());
+                        i++;
+                    }
+                } else if ("Error_Path".equals(header)) {
+                    if (info[i + 1].contains("=") || info[i + 1].contains(":"))
+                        jobDescriptor.setStandardErrorFile(value);
+                    else {
+                        String st = info[i + 1].trim();
+                        jobDescriptor.setStandardErrorFile(value + st);
+                        i++;
+                    }
+
+                } else if ("submit_args".equals(header)) {
+                    while (i + 1 < info.length) {
+                        if (info[i + 1].startsWith("\t")) {
+                            value += info[i + 1];
+                            i++;
+                        } else
+                            break;
+                    }
+                    value = value.replaceAll("\t", "");
+                    jobDescriptor.setSubmitArgs(value);
+                }
+            }
+        }
+    }
+
+	public String parseJobSubmission(String rawOutput) {
+		log.debug(rawOutput);
+		if (rawOutput != null && !rawOutput.isEmpty()) {
+			String[] info = rawOutput.split("\n");
+			String lastLine = info[info.length - 1];
+			return lastLine.split(" ")[2]; // for UGE the job id is the third token of the last line of qsub's stdout
+		} else {
+			return "";
+		}
+	}
+
+    public JobStatus parseJobStatus(String jobID, String rawOutput) {
+        Pattern pattern = Pattern.compile("job_number:[\\s]+" + jobID);
+        Matcher matcher = pattern.matcher(rawOutput);
+        if (matcher.find()) {
+	        return new JobStatus(JobState.QUEUED); // fixme; return correct status.
+        }
+	    return new JobStatus(JobState.UNKNOWN);
+    }
+
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) {
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+        int lastStop = 0;
+        for (String jobID : statusMap.keySet()) {
+            for (int i = lastStop; i < info.length; i++) {
+               if(jobID.split(",")[0].contains(info[i].split(" ")[0]) && !"".equals(info[i].split(" ")[0])){
+                   // now starts processing this line
+                   log.info(info[i]);
+                   String correctLine = info[i];
+                   String[] columns = correctLine.split(" ");
+                   List<String> columnList = new ArrayList<String>();
+                   for (String s : columns) {
+                       if (!"".equals(s)) {
+                           columnList.add(s);
+                       }
+                   }
+                   lastStop = i+1;
+                   if ("E".equals(columnList.get(4))) {
+                       // Another status also uses the letter "E"; remap the error state to "Er"
+                       // so the two are not confused.
+                       columnList.set(4, "Er");
+                   }
+	               statusMap.put(jobID, new JobStatus(JobState.valueOf(columnList.get(4))));
+	               break;
+               }
+            }
+        }
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws SSHApiException {
+        if (jobName.length() > 10) {
+            jobName = jobName.substring(0, 10);
+        }
+        Pattern pattern = Pattern.compile("(?<" + JOB_ID + ">\\S+)\\s+\\S+\\s+(" + jobName + ")");
+        Matcher matcher = pattern.matcher(rawOutput);
+        if (matcher.find()) {
+            return matcher.group(JOB_ID);
+        }
+        return null;
+    }
+
+
+}
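For reference, a minimal sketch of the token extraction performed by parseJobSubmission above; the sample line assumes UGE's usual 'Your job <id> ("<name>") has been submitted' acknowledgement, and the id and name are made up.

    public class UGESubmissionParsingSketch {
        public static void main(String[] args) {
            String rawOutput = "Your job 2938 (\"A312402627\") has been submitted";
            String[] info = rawOutput.split("\n");
            String lastLine = info[info.length - 1];
            // The job id is the third whitespace-separated token of the last line
            System.out.println(lastLine.split(" ")[2]);  // 2938
        }
    }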

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
deleted file mode 100644
index 69119d2..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package org.apache.airavata.gfac.monitor;/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.SecurityContext;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.Timestamp;
-import java.util.Date;
-
-public class HPCMonitorID extends MonitorID {
-    private final static Logger logger = LoggerFactory.getLogger(HPCMonitorID.class);
-
-
-    private AuthenticationInfo authenticationInfo = null;
-
-    public HPCMonitorID(ComputeResourceDescription computeResourceDescription, String jobID, String taskID, String workflowNodeID,
-                        String experimentID, String userName,String jobName) {
-        super(computeResourceDescription, jobID, taskID, workflowNodeID, experimentID, userName,jobName);
-        setComputeResourceDescription(computeResourceDescription);
-        setJobStartedTime(new Timestamp((new Date()).getTime()));
-        setUserName(userName);
-        setJobID(jobID);
-        setTaskID(taskID);
-        setExperimentID(experimentID);
-        setWorkflowNodeID(workflowNodeID);
-    }
-
-    public HPCMonitorID(AuthenticationInfo authenticationInfo, JobExecutionContext jobExecutionContext) {
-        super(jobExecutionContext);
-        this.authenticationInfo = authenticationInfo;
-        if (this.authenticationInfo != null) {
-            try {
-                String hostAddress = jobExecutionContext.getHostName();
-                SecurityContext securityContext = jobExecutionContext.getSecurityContext(hostAddress);
-                ServerInfo serverInfo = null;
-                if (securityContext != null) {
-                    if (securityContext instanceof  GSISecurityContext){
-                        serverInfo = (((GSISecurityContext) securityContext).getRemoteCluster()).getServerInfo();
-                        if (serverInfo.getUserName() != null) {
-                            setUserName(serverInfo.getUserName());
-                        }
-                    }
-                    if (securityContext instanceof SSHSecurityContext){
-                        serverInfo = (((SSHSecurityContext) securityContext).getRemoteCluster()).getServerInfo();
-                        if (serverInfo.getUserName() != null) {
-                            setUserName(serverInfo.getUserName());
-                        }
-                    }
-                }
-            } catch (GFacException e) {
-                logger.error("Error while getting security context", e);
-            }
-        }
-    }
-
-    public HPCMonitorID(ComputeResourceDescription computeResourceDescription, String jobID, String taskID, String workflowNodeID, String experimentID, String userName, AuthenticationInfo authenticationInfo) {
-        setComputeResourceDescription(computeResourceDescription);
-        setJobStartedTime(new Timestamp((new Date()).getTime()));
-        this.authenticationInfo = authenticationInfo;
-        // if we give myproxyauthenticationInfo, so we try to use myproxy user as the user
-        if (this.authenticationInfo != null) {
-            if (this.authenticationInfo instanceof MyProxyAuthenticationInfo) {
-                setUserName(((MyProxyAuthenticationInfo) this.authenticationInfo).getUserName());
-            }
-        }
-        setJobID(jobID);
-        setTaskID(taskID);
-        setExperimentID(experimentID);
-        setWorkflowNodeID(workflowNodeID);
-    }
-
-    public AuthenticationInfo getAuthenticationInfo() {
-        return authenticationInfo;
-    }
-
-    public void setAuthenticationInfo(AuthenticationInfo authenticationInfo) {
-        this.authenticationInfo = authenticationInfo;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
deleted file mode 100644
index f29e3e6..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class HostMonitorData {
-//    private HostDescription host;
-    private ComputeResourceDescription computeResourceDescription;
-    private JobSubmissionProtocol jobSubmissionProtocol;
-    private DataMovementProtocol dataMovementProtocol;
-
-    private List<MonitorID> monitorIDs;
-
-    public HostMonitorData(JobExecutionContext jobExecutionContext) {
-        this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
-        this.jobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
-        this.dataMovementProtocol = jobExecutionContext.getPreferredDataMovementProtocol();
-        this.monitorIDs = new ArrayList<MonitorID>();
-    }
-
-    public HostMonitorData(JobExecutionContext jobExecutionContext, List<MonitorID> monitorIDs) {
-        this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
-        this.jobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
-        this.dataMovementProtocol = jobExecutionContext.getPreferredDataMovementProtocol();
-        this.monitorIDs = monitorIDs;
-    }
-
-    public ComputeResourceDescription getComputeResourceDescription() {
-        return computeResourceDescription;
-    }
-
-    public void setComputeResourceDescription(ComputeResourceDescription computeResourceDescription) {
-        this.computeResourceDescription = computeResourceDescription;
-    }
-
-    public List<MonitorID> getMonitorIDs() {
-        return monitorIDs;
-    }
-
-    public void setMonitorIDs(List<MonitorID> monitorIDs) {
-        this.monitorIDs = monitorIDs;
-    }
-
-    /**
-     * this method get called by CommonUtils and it will check the right place before adding
-     * so there will not be a mismatch between this.host and monitorID.host
-     * @param monitorID
-     * @throws org.apache.airavata.gfac.monitor.exception.AiravataMonitorException
-     */
-    public void addMonitorIDForHost(MonitorID monitorID)throws AiravataMonitorException {
-        monitorIDs.add(monitorID);
-    }
-
-    public JobSubmissionProtocol getJobSubmissionProtocol() {
-        return jobSubmissionProtocol;
-    }
-
-    public DataMovementProtocol getDataMovementProtocol() {
-        return dataMovementProtocol;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/UserMonitorData.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/UserMonitorData.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/UserMonitorData.java
deleted file mode 100644
index 022d17c..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/UserMonitorData.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor;
-
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * This is the datastructure to keep the user centric job data, rather keeping
- * the individual jobs we keep the jobs based on the each user
- */
-public class UserMonitorData {
-    private final static Logger logger = LoggerFactory.getLogger(UserMonitorData.class);
-
-    private String  userName;
-
-    private List<HostMonitorData> hostMonitorData;
-
-
-    public UserMonitorData(String userName) {
-        this.userName = userName;
-        hostMonitorData = new ArrayList<HostMonitorData>();
-    }
-
-    public UserMonitorData(String userName, List<HostMonitorData> hostMonitorDataList) {
-        this.hostMonitorData = hostMonitorDataList;
-        this.userName = userName;
-    }
-
-    public List<HostMonitorData> getHostMonitorData() {
-        return hostMonitorData;
-    }
-
-    public void setHostMonitorData(List<HostMonitorData> hostMonitorData) {
-        this.hostMonitorData = hostMonitorData;
-    }
-
-    public String getUserName() {
-        return userName;
-    }
-
-    public void setUserName(String userName) {
-        this.userName = userName;
-    }
-
-    /*
-    This method will add element to the MonitorID list, user should not
-    duplicate it, we do not check it because its going to be used by airavata
-    so we have to use carefully and this method will add a host if its a new host
-     */
-    public void addHostMonitorData(HostMonitorData hostMonitorData) throws AiravataMonitorException {
-        this.hostMonitorData.add(hostMonitorData);
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/command/ExperimentCancelRequest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/command/ExperimentCancelRequest.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/command/ExperimentCancelRequest.java
deleted file mode 100644
index f19decf..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/command/ExperimentCancelRequest.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.monitor.command;
-
-public class ExperimentCancelRequest {
-	private String experimentId;
-
-	public ExperimentCancelRequest(String experimentId) {
-		this.experimentId = experimentId;
-	}
-
-	public String getExperimentId() {
-		return experimentId;
-	}
-
-	public void setExperimentId(String experimentId) {
-		this.experimentId = experimentId;
-	}
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/command/TaskCancelRequest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/command/TaskCancelRequest.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/command/TaskCancelRequest.java
deleted file mode 100644
index b45e01c..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/command/TaskCancelRequest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.monitor.command;
-
-public class TaskCancelRequest {
-	private String experimentId;
-	private String nodeId;
-	private String taskId;
-	
-	public TaskCancelRequest(String experimentId, String nodeId, String taskId) {
-		this.experimentId = experimentId;
-		this.setNodeId(nodeId);
-		this.taskId = taskId;
-	}
-	public String getExperimentId() {
-		return experimentId;
-	}
-	public void setExperimentId(String experimentId) {
-		this.experimentId = experimentId;
-	}
-	public String getTaskId() {
-		return taskId;
-	}
-	public void setTaskId(String taskId) {
-		this.taskId = taskId;
-	}
-	public String getNodeId() {
-		return nodeId;
-	}
-	public void setNodeId(String nodeId) {
-		this.nodeId = nodeId;
-	}
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/AiravataAbstractMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/AiravataAbstractMonitor.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/AiravataAbstractMonitor.java
deleted file mode 100644
index 72ffad6..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/AiravataAbstractMonitor.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.core;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * This is the abstract Monitor which needs to be used by
- * any Monitoring implementation which expect nto consume
- * to store the status to registry. Because they have to
- * use the LocalEventPublisher to publish the monitoring statuses
- * to the Event Bus. All the Monitor statuses publish to the eventbus
- * will be saved to the Registry.
- */
-public abstract class AiravataAbstractMonitor implements Monitor {
-    private final static Logger logger = LoggerFactory.getLogger(AiravataAbstractMonitor.class);
-
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/MessageParser.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/MessageParser.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/MessageParser.java
deleted file mode 100644
index aada526..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/MessageParser.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.core;
-
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.apache.airavata.model.experiment.JobState;
-
-/**
- * This is an interface to implement messageparser, it could be
- * pull based or push based still monitor has to parse the content of
- * the message it gets from remote monitoring system and finalize
- * them to internal job state, Ex: JSON parser for AMQP and Qstat reader
- * for pull based monitor.
- */
-public interface MessageParser {
-    /**
-     * This method is to implement how to parse the incoming message
-     * and implement a logic to finalize the status of the job,
-     * we have to makesure the correct message is given to the messageparser
-     * parse method, it will not do any filtering
-     * @param message content of the message
-     * @return
-     */
-    JobState parseMessage(String message)throws AiravataMonitorException;
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/Monitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/Monitor.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/Monitor.java
deleted file mode 100644
index 614d606..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/Monitor.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.core;
-
-
-/**
- * This is the primary interface for Monitors,
- * This can be used to implement different methods of monitoring
- */
-public interface Monitor {
-
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/PullMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/PullMonitor.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/PullMonitor.java
deleted file mode 100644
index efdf89c..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/PullMonitor.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.core;
-
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-
-/**
- * PullMonitors can implement this interface
- * Since the pull and push based monitoring required different
- * operations, PullMonitor will be useful.
- * This will allow users to program Pull monitors separately
- */
-public abstract class PullMonitor extends AiravataAbstractMonitor {
-
-    private int pollingFrequence;
-    /**
-     * This method will can invoke when PullMonitor needs to start
-     * and it has to invoke in the frequency specified below,
-     * @return if the start process is successful return true else false
-     */
-    public abstract boolean startPulling() throws AiravataMonitorException;
-
-    /**
-     * This is the method to stop the polling process
-     * @return if the stopping process is successful return true else false
-     */
-    public abstract boolean stopPulling()throws AiravataMonitorException;
-
-    /**
-     * this method can be used to set the polling frequencey or otherwise
-     * can implement a polling mechanism, and implement how to do
-     * @param frequence
-     */
-    public void setPollingFrequence(int frequence){
-        this.pollingFrequence = frequence;
-    }
-
-    /**
-     * this method can be used to get the polling frequencey or otherwise
-     * can implement a polling mechanism, and implement how to do
-     * @return
-     */
-    public int getPollingFrequence(){
-        return this.pollingFrequence;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/PushMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/PushMonitor.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/PushMonitor.java
deleted file mode 100644
index 1b6a228..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/core/PushMonitor.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.core;
-
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-
-/**
- * PushMonitors can implement this interface
- * Since the pull and push based monitoring required different
- * operations, PullMonitor will be useful.
- * This interface will allow users to program Push monitors separately
- */
-public abstract class PushMonitor extends AiravataAbstractMonitor {
-    /**
-     * This method can be invoked to register a listener with the
-     * remote monitoring system, ideally inside this method users will be
-     * writing some client listener code for the remote monitoring system,
-     * this will be a simple wrapper around any client for the remote Monitor.
-     * @param monitorID
-     * @return
-     */
-    public abstract boolean registerListener(MonitorID monitorID)throws AiravataMonitorException;
-
-    /**
-     * This method can be invoked to unregister a listener with the
-     * remote monitoring system, ideally inside this method users will be
-     * writing some client listener code for the remote monitoring system,
-     * this will be a simple wrapper around any client for the remote Monitor.
-     * @param monitorID
-     * @return
-     */
-    public abstract boolean unRegisterListener(MonitorID monitorID)throws AiravataMonitorException;
-
-    /**
-     * This can be used to stop the registration thread
-     * @return
-     * @throws org.apache.airavata.gfac.monitor.exception.AiravataMonitorException
-     */
-    public abstract boolean stopRegister()throws AiravataMonitorException;
-
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailBasedMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailBasedMonitor.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailBasedMonitor.java
index 782a454..5ef0e88 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailBasedMonitor.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailBasedMonitor.java
@@ -28,6 +28,8 @@ import org.apache.airavata.gfac.core.GFacException;
 import org.apache.airavata.gfac.core.GFacUtils;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.GFacThreadPoolExecutor;
+import org.apache.airavata.gfac.core.context.ProcessContext;
+import org.apache.airavata.gfac.core.monitor.JobMonitor;
 import org.apache.airavata.gfac.core.monitor.JobStatusResult;
 import org.apache.airavata.gfac.core.monitor.EmailParser;
 import org.apache.airavata.gfac.impl.OutHandlerWorker;
@@ -42,6 +44,8 @@ import org.apache.airavata.model.experiment.CorrectiveAction;
 import org.apache.airavata.model.experiment.ErrorCategory;
 import org.apache.airavata.model.experiment.JobState;
 import org.apache.airavata.model.experiment.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.mail.Address;
 import javax.mail.Flags;
@@ -60,8 +64,8 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.ConcurrentHashMap;
 
-public class EmailBasedMonitor implements Runnable{
-    private static final AiravataLogger log = AiravataLoggerFactory.getLogger(EmailBasedMonitor.class);
+public class EmailBasedMonitor implements JobMonitor, Runnable{
+    private static final Logger log = LoggerFactory.getLogger(EmailBasedMonitor.class);
 
     public static final int COMPARISON = 6; // after and equal
     public static final String IMAPS = "imaps";
@@ -350,4 +354,14 @@ public class EmailBasedMonitor implements Runnable{
     public void setDate(Date date) {
         this.monitorStartDate = date;
     }
+
+	@Override
+	public void monitor(String jobId, ProcessContext processContext) {
+
+	}
+
+	@Override
+	public void stopMonitor(String jobId) {
+
+	}
 }

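The interesting part of this change is that EmailBasedMonitor now sits behind the gfac-core JobMonitor interface (the two new methods are still empty stubs in this commit). Below is a minimal caller-side sketch of that contract; the class name and the way the monitor instance is obtained are assumptions, and only the monitor(...) and stopMonitor(...) signatures come from the diff.

import org.apache.airavata.gfac.core.context.ProcessContext;
import org.apache.airavata.gfac.core.monitor.JobMonitor;

// Illustrative caller; not part of this commit.
public class JobMonitorUsageSketch {

    // Hand a submitted job over to whichever JobMonitor implementation is
    // configured (for example the EmailBasedMonitor above).
    public static void startTracking(JobMonitor monitor, String jobId, ProcessContext processContext) {
        monitor.monitor(jobId, processContext);
    }

    // Stop watching a job, e.g. after it has been cancelled or has completed.
    public static void stopTracking(JobMonitor monitor, String jobId) {
        monitor.stopMonitor(jobId);
    }
}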
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailMonitorFactory.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailMonitorFactory.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailMonitorFactory.java
deleted file mode 100644
index 870cfa4..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/email/EmailMonitorFactory.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.email;
-
-import org.apache.airavata.common.exception.AiravataException;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
-
-import java.util.Calendar;
-import java.util.Date;
-
-public class EmailMonitorFactory {
-
-    private static EmailBasedMonitor emailBasedMonitor;
-    private static Date startMonitorDate = Calendar.getInstance().getTime();
-
-    public static EmailBasedMonitor getEmailBasedMonitor(ResourceJobManagerType resourceJobManagerType) throws AiravataException {
-        if (emailBasedMonitor == null) {
-            synchronized (EmailMonitorFactory.class){
-                if (emailBasedMonitor == null) {
-                    emailBasedMonitor = new EmailBasedMonitor(resourceJobManagerType);
-                    emailBasedMonitor.setDate(startMonitorDate);
-                    new Thread(emailBasedMonitor).start();
-                }
-            }
-        }
-        return emailBasedMonitor;
-    }
-
-}

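One note on the removed factory: it lazily created a singleton EmailBasedMonitor using double-checked locking on a non-volatile static field, which the Java memory model does not guarantee to be safe. A corrected sketch of the same pattern is shown below; it is illustration only, since the factory is deleted in this commit, and the class name is changed to make that clear.

import java.util.Calendar;

import org.apache.airavata.common.exception.AiravataException;
import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;

// Sketch only: mirrors the removed EmailMonitorFactory, with the volatile
// keyword the original was missing so double-checked locking is safe.
public class EmailMonitorFactorySketch {

    private static volatile EmailBasedMonitor emailBasedMonitor;

    public static EmailBasedMonitor getEmailBasedMonitor(ResourceJobManagerType resourceJobManagerType)
            throws AiravataException {
        if (emailBasedMonitor == null) {                          // first check, no lock
            synchronized (EmailMonitorFactorySketch.class) {
                if (emailBasedMonitor == null) {                  // second check, under the lock
                    EmailBasedMonitor monitor = new EmailBasedMonitor(resourceJobManagerType);
                    monitor.setDate(Calendar.getInstance().getTime());
                    new Thread(monitor).start();
                    emailBasedMonitor = monitor;                  // publish the fully started instance
                }
            }
        }
        return emailBasedMonitor;
    }
}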
http://git-wip-us.apache.org/repos/asf/airavata/blob/d9b2df03/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
deleted file mode 100644
index a7e5b90..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.monitor.handlers;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.logger.AiravataLogger;
-import org.apache.airavata.common.logger.AiravataLoggerFactory;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.handler.ThreadedHandler;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
-import org.apache.airavata.gfac.monitor.HPCMonitorID;
-import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
-import org.apache.airavata.gfac.monitor.impl.pull.qstat.HPCPullMonitor;
-import org.apache.airavata.gfac.monitor.util.CommonUtils;
-import org.apache.zookeeper.WatchedEvent;
-import org.apache.zookeeper.Watcher;
-
-import java.util.Properties;
-
-/**
- * This handler is responsible for monitoring jobs in pull mode.
- * It currently supports pull monitoring of multiple jobs on grid resources,
- * using commands such as qstat and squeue, and it also supports Sun Grid
- * Engine monitoring, which is a slight variation of qstat monitoring.
- */
-public class GridPullMonitorHandler extends ThreadedHandler implements Watcher{
-    private final static AiravataLogger logger = AiravataLoggerFactory.getLogger(GridPullMonitorHandler.class);
-
-    private HPCPullMonitor hpcPullMonitor;
-
-    private AuthenticationInfo authenticationInfo;
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-        String myProxyUser = null;
-        try {
-            myProxyUser = ServerSettings.getSetting("myproxy.username");
-            String myProxyPass = ServerSettings.getSetting("myproxy.password");
-            String certPath = ServerSettings.getSetting("trusted.cert.location");
-            String myProxyServer = ServerSettings.getSetting("myproxy.server");
-            setAuthenticationInfo(new MyProxyAuthenticationInfo(myProxyUser, myProxyPass, myProxyServer,
-                    7512, 17280000, certPath));
-            hpcPullMonitor = new HPCPullMonitor(null,getAuthenticationInfo());    // we use our own credentials for monitoring, not from the store
-        } catch (ApplicationSettingsException e) {
-            logger.error("Error while  reading server properties", e);
-            throw new GFacHandlerException("Error while  reading server properties", e);
-        }
-    }
-
-    public void run() {
-        hpcPullMonitor.run();
-    }
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        super.invoke(jobExecutionContext);
-        hpcPullMonitor.setGfac(jobExecutionContext.getGfac());
-        hpcPullMonitor.setPublisher(jobExecutionContext.getLocalEventPublisher());
-        MonitorID monitorID = new HPCMonitorID(getAuthenticationInfo(), jobExecutionContext);
-        try {
-           /* ZooKeeper zk = jobExecutionContext.getZk();
-            try {
-                String experimentEntry = GFacUtils.findExperimentEntry(jobExecutionContext.getExperimentID(), zk);
-                String path = experimentEntry + File.separator + "operation";
-                Stat exists = zk.exists(path, this);
-                if (exists != null) {
-                    zk.getData(path, this, exists); // watching the operations node
-                }
-            } catch (KeeperException e) {
-                logger.error(e.getMessage(), e);
-            } catch (InterruptedException e) {
-                logger.error(e.getMessage(), e);
-            }*/
-            CommonUtils.addMonitortoQueue(hpcPullMonitor.getQueue(), monitorID, jobExecutionContext);
-            CommonUtils.increaseZkJobCount(monitorID); // update change job count to zookeeper
-        } catch (AiravataMonitorException e) {
-            logger.errorId(monitorID.getJobID(), "Error adding the monitorID object for job {} to the queue, experiment {}",
-                    monitorID.getJobID(), monitorID.getExperimentID());
-        }
-    }
-
-    @Override
-    public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        // TODO: Auto generated method body.
-    }
-
-    public AuthenticationInfo getAuthenticationInfo() {
-        return authenticationInfo;
-    }
-
-    public HPCPullMonitor getHpcPullMonitor() {
-        return hpcPullMonitor;
-    }
-
-    public void setAuthenticationInfo(AuthenticationInfo authenticationInfo) {
-        this.authenticationInfo = authenticationInfo;
-    }
-
-    public void setHpcPullMonitor(HPCPullMonitor hpcPullMonitor) {
-        this.hpcPullMonitor = hpcPullMonitor;
-    }
-
-
-    public void process(WatchedEvent watchedEvent) {
-        logger.info(watchedEvent.getPath());
-        if(Event.EventType.NodeDataChanged.equals(watchedEvent.getType())){
-            // node data is changed, this means node is cancelled.
-            logger.info("Experiment is cancelled with this path:"+watchedEvent.getPath());
-
-            String[] split = watchedEvent.getPath().split("/");
-            for(String element:split) {
-                if (element.contains("+")) {
-                    logger.info("Adding experimentID+TaskID to be removed from monitoring:"+element);
-                    hpcPullMonitor.getCancelJobList().add(element);
-                }
-            }
-        }
-    }
-}
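
Finally, the removed GridPullMonitorHandler's process() callback cancelled jobs by splitting the changed ZooKeeper path and treating every element containing '+' as an experimentID+TaskID pair to add to the cancel list. A standalone sketch of that parsing is below; the sample path in main() is purely illustrative, since the real layout depends on Airavata's ZooKeeper tree.

import java.util.ArrayList;
import java.util.List;

public class CancelPathParsingSketch {

    // Returns every path element containing '+', which the removed handler
    // treated as an experimentID+TaskID pair to add to the cancel list.
    public static List<String> extractCancelTokens(String zkPath) {
        List<String> tokens = new ArrayList<>();
        for (String element : zkPath.split("/")) {
            if (element.contains("+")) {
                tokens.add(element);
            }
        }
        return tokens;
    }

    public static void main(String[] args) {
        // Hypothetical node path; the real layout depends on Airavata's ZooKeeper tree.
        String path = "/experiments/gfac-server1/experiment123+task456/operation";
        System.out.println(extractCancelTokens(path)); // prints [experiment123+task456]
    }
}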