Posted to commits@airavata.apache.org by sh...@apache.org on 2015/05/08 17:55:16 UTC

[1/4] airavata git commit: removed gfac-ec2, gfac-gram and gfac-hadoop modules from source.

Repository: airavata
Updated Branches:
  refs/heads/master 742edee5c -> 702399169


http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java b/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
deleted file mode 100644
index 6d5427a..0000000
--- a/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.gram.security.GSISecurityContext;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.ProjectAccountType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class GramProviderTestWithMyProxyAuth extends GFacBaseTestWithMyProxyAuth {
-    private JobExecutionContext jobExecutionContext;
-
-
-//    private static final String hostAddress = "blacklight.psc.teragrid.org";
-//    private static final String hostName = "Blacklight";
-//    private static final String gridftpAddress = "gsiftp://gridftp.blacklight.psc.teragrid.org:2812";
-//    private static final String gramAddress = "";
-
-    //FIXME: move job properties to configuration file
-    private static final String hostAddress = "trestles.sdsc.edu";
-    private static final String hostName = "trestles";
-    private static final String gridftpAddress = "gsiftp://trestles.sdsc.edu:2811/";
-    private static final String gramAddress = "trestles-login2.sdsc.edu:2119/jobmanager-pbstest2";
-
-    @Before
-    public void setUp() throws Exception {
-        URL resource = GramProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        assert resource != null;
-        System.out.println(resource.getFile());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null);
-//        gFacConfiguration.setMyProxyLifeCycle(3600);
-//        gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
-//        gFacConfiguration.setMyProxyUser("*****");
-//        gFacConfiguration.setMyProxyPassphrase("*****");
-//        gFacConfiguration.setTrustedCertLocation("./certificates");
-//        //have to set in-flow handlers and out-flow handlers
-//        gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"GramDirectorySetupHandler","GridFTPInputHandler"}));
-//        gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"GridFTPOutputHandler"}));
-
-        /*
-         * Host
-         */
-        HostDescription host = new HostDescription(GlobusHostType.type);
-        host.getType().setHostAddress(hostAddress);
-        host.getType().setHostName(hostName);
-        ((GlobusHostType)host.getType()).setGlobusGateKeeperEndPointArray(new String[]{gramAddress});
-        ((GlobusHostType)host.getType()).setGridFTPEndPointArray(new String[]{gridftpAddress});
-        /*
-         * App
-         */
-        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType)appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-        ProjectAccountType projectAccountType = app.addNewProjectAccount();
-        projectAccountType.setProjectAccountNumber("sds128");
-
-        QueueType queueType = app.addNewQueue();
-        queueType.setQueueName("development");
-
-        app.setCpuCount(1);
-        app.setJobType(JobTypeType.SERIAL);
-        app.setNodeCount(1);
-        app.setProcessorsPerNode(1);
-
-        /*
-         * Use a .bat file if the executable is compiled for Windows.
-         */
-        app.setExecutableLocation("/bin/echo");
-
-        /*
-         * Default temp location
-         */
-        String tempDir = "/scratch/01437/ogce/test/";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = tempDir + File.separator
-                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
-
-        System.out.println(tempDir);
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "inputData");
-        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
-        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
-        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
-
-        /*
-         * Service
-         */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-
-        InputParameterType input1 = InputParameterType.Factory.newInstance();
-        input1.setParameterName("myinput");
-        URIParameterType uriType = URIParameterType.Factory.newInstance();
-        uriType.setValue("gsiftp://gridftp1.ls4.tacc.utexas.edu:2811//home1/01437/ogce/gram_20130215.log");
-        input1.setParameterType(uriType);
-        inputList.add(input1);
-
-
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-        // Adding security context
-        jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext());
-        ApplicationContext applicationContext = new ApplicationContext();
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-        applicationContext.setHostDescription(host);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-        // added extra
-        ActualParameter copy_input = new ActualParameter();
-        copy_input.getType().changeType(URIParameterType.type);
-        ((URIParameterType)copy_input.getType()).setValue("file:///tmp/tmpstrace");
-
-        ActualParameter outlocation = new ActualParameter();
-        ((StringParameterType)outlocation.getType()).setValue("./outputData/.");
-        inMessage.addParameter("copy_input", copy_input);
-        inMessage.addParameter("outputlocation", outlocation);
-
-        // added extra
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-//		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-    @Test
-    public void testGramProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-        Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
-    }
-
-    @Test
-    public void testGetJdbcUrl()  {
-        System.out.println(getJDBCUrl());
-    }
-}

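The deleted test above hard-codes the Trestles endpoints behind a FIXME ("move job properties
to configuration file"). A minimal sketch of what that externalization could look like; the
file name gram-test.properties and its keys are hypothetical:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public final class GramTestProperties {
        // Loads the host, GRAM, and GridFTP endpoints from the test classpath
        // instead of compile-time constants.
        public static Properties load() throws IOException {
            Properties props = new Properties();
            try (InputStream in = GramTestProperties.class.getClassLoader()
                    .getResourceAsStream("gram-test.properties")) {
                if (in == null) {
                    throw new IOException("gram-test.properties not found on classpath");
                }
                props.load(in);
            }
            return props;
        }
    }

setUp() would then read e.g. props.getProperty("gram.host.address") in place of hostAddress.
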
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt b/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
deleted file mode 100644
index e749e9c..0000000
--- a/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
+++ /dev/null
@@ -1,73 +0,0 @@
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
-	distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
-	the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
-	obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
-	in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-	ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
-	the License. -->
-<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
-<xsl:output method="text" />
-<xsl:template match="/ns:JobDescriptor">
-#! /bin/sh
-# PBS batch job script built by Globus job manager
-#   <xsl:choose>
-    <xsl:when test="ns:shellName">
-##PBS -S <xsl:value-of select="ns:shellName"/>
-    </xsl:when></xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:queueName">
-#PBS -q <xsl:value-of select="ns:queueName"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:mailOptions">
-#PBS -m <xsl:value-of select="ns:mailOptions"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-<xsl:when test="ns:acountString">
-#PBS -A <xsl:value-of select="ns:acountString"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:maxWallTime">
-#PBS -l walltime=<xsl:value-of select="ns:maxWallTime"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:standardOutFile">
-#PBS -o <xsl:value-of select="ns:standardOutFile"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:standardOutFile">
-#PBS -e <xsl:value-of select="ns:standardErrorFile"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
-#PBS -l nodes=<xsl:value-of select="ns:nodes"/>:ppn=<xsl:value-of select="ns:processesPerNode"/>
-<xsl:text>&#xa;</xsl:text>
-    </xsl:when>
-    </xsl:choose>
-<xsl:for-each select="ns:exports/ns:name">
-<xsl:value-of select="."/>=<xsl:value-of select="./@value"/><xsl:text>&#xa;</xsl:text>
-export<xsl:text>   </xsl:text><xsl:value-of select="."/>
-<xsl:text>&#xa;</xsl:text>
-</xsl:for-each>
-<xsl:for-each select="ns:preJobCommands/ns:command">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-    </xsl:for-each>
-cd <xsl:text>   </xsl:text><xsl:value-of select="ns:workingDirectory"/><xsl:text>&#xa;</xsl:text>
-    <xsl:choose><xsl:when test="ns:jobSubmitterCommand">
-<xsl:value-of select="ns:jobSubmitterCommand"/><xsl:text>   </xsl:text></xsl:when></xsl:choose><xsl:value-of select="ns:executablePath"/><xsl:text>   </xsl:text>
-<xsl:for-each select="ns:inputs/ns:input">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-    </xsl:for-each>
-<xsl:for-each select="ns:postJobCommands/ns:command">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-</xsl:for-each>
-
-</xsl:template>
-
-</xsl:stylesheet>
\ No newline at end of file

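The deleted PBSTemplate.xslt turns a ns:JobDescriptor document into a PBS batch script. A
minimal sketch of applying it with the standard JAXP transform API, assuming a serialized
job descriptor in the http://airavata.apache.org/gsi/ssh/2012/12 namespace (the input and
output file names are placeholders):

    import java.io.File;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.stream.StreamResult;
    import javax.xml.transform.stream.StreamSource;

    public final class PbsScriptGenerator {
        public static void main(String[] args) throws Exception {
            // Compile the stylesheet once; a Transformer can be reused per job descriptor.
            Transformer transformer = TransformerFactory.newInstance()
                    .newTransformer(new StreamSource(new File("PBSTemplate.xslt")));
            // Renders the descriptor into a plain-text PBS submission script.
            transformer.transform(new StreamSource(new File("job-descriptor.xml")),
                    new StreamResult(new File("job.pbs")));
        }
    }
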
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/resources/logging.properties b/modules/gfac/gfac-gram/src/test/resources/logging.properties
deleted file mode 100644
index 0584d38..0000000
--- a/modules/gfac/gfac-gram/src/test/resources/logging.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#default/fallback log4j configuration
-#
-
-# Set root logger level to INFO with appenders A1 and A2.
-log4j.rootLogger=INFO, A1, A2
-
-# A1 is set to be a rolling file appender with default params
-log4j.appender.A1=org.apache.log4j.RollingFileAppender
-log4j.appender.A1.File=target/seclogs.txt
-
-# A1 uses PatternLayout.
-log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
-
-# A2 is a console appender
-log4j.appender.A2=org.apache.log4j.ConsoleAppender
-
-# A2 uses PatternLayout.
-log4j.appender.A2.layout=org.apache.log4j.PatternLayout
-log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
-
-log4j.logger.unicore.security=INFO
-

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/pom.xml b/modules/gfac/gfac-hadoop/pom.xml
deleted file mode 100644
index 3cd412c..0000000
--- a/modules/gfac/gfac-hadoop/pom.xml
+++ /dev/null
@@ -1,116 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file 
-    distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under 
-    the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may 
-    obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to 
-    in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
-    ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under 
-    the License. -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>org.apache.airavata</groupId>
-        <artifactId>gfac</artifactId>
-        <version>0.14-SNAPSHOT</version>
-        <relativePath>../pom.xml</relativePath>
-    </parent>
-
-    <modelVersion>4.0.0</modelVersion>
-    <artifactId>airavata-gfac-hadoop</artifactId>
-    <name>Airavata GFac Hadoop implementation</name>
-    <description>This is the extension of GFAC to use Hadoop.</description>
-    <url>http://airavata.apache.org/</url>
-
-    <dependencies>
-        <!-- Logging -->
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-
-        <!-- GFAC schemas -->
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-gfac-core</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <!-- Test -->
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-            <version>6.1.1</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>jcl-over-slf4j</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-server-configuration</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-client-configuration</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <!-- Hadoop provider related dependencies -->
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
-            <version>1.0.3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <version>1.0.3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.whirr</groupId>
-            <artifactId>whirr-core</artifactId>
-            <version>0.7.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.whirr</groupId>
-            <artifactId>whirr-hadoop</artifactId>
-            <version>0.7.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.hamcrest</groupId>
-            <artifactId>hamcrest-all</artifactId>
-            <version>1.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-all</artifactId>
-            <version>1.8.5</version>
-        </dependency>
-        <dependency>
-            <groupId>commons-configuration</groupId>
-            <artifactId>commons-configuration</artifactId>
-            <version>1.7</version>
-        </dependency>
-        <dependency>
-            <groupId>net.sf.jopt-simple</groupId>
-            <artifactId>jopt-simple</artifactId>
-            <version>3.2</version>
-        </dependency>
-    </dependencies>
-
-</project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HDFSDataMovementHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HDFSDataMovementHandler.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HDFSDataMovementHandler.java
deleted file mode 100644
index db75cb1..0000000
--- a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HDFSDataMovementHandler.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.hadoop.handler;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.GFacHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.hadoop.provider.utils.HadoopUtils;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Properties;
-
-public class HDFSDataMovementHandler implements GFacHandler {
-    private static final Logger logger = LoggerFactory.getLogger(HDFSDataMovementHandler.class);
-
-    private boolean isWhirrBasedDeployment = false;
-    private File hadoopConfigDir;
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE").equals("WHIRR")){
-            isWhirrBasedDeployment = true;
-        } else {
-            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
-            File hadoopConfigDir = new File(hadoopConfigDirPath);
-            if (!hadoopConfigDir.exists()){
-                throw new GFacHandlerException("Specified hadoop configuration directory doesn't exist.");
-            } else if (FileUtils.listFiles(hadoopConfigDir, (String[]) null, false).isEmpty()){
-                throw new GFacHandlerException("Cannot find any hadoop configuration files inside specified directory.");
-            }
-
-            this.hadoopConfigDir = hadoopConfigDir;
-        }
-
-        if(jobExecutionContext.isInPath()){
-            try {
-                handleInPath(jobExecutionContext);
-            } catch (IOException e) {
-                throw new GFacHandlerException("Error while copying input data from local file system to HDFS.",e);
-            }
-        } else {
-            handleOutPath(jobExecutionContext);
-        }
-    }
-
-    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException, IOException {
-        ApplicationDeploymentDescriptionType appDepDesc =
-                jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
-                (HadoopApplicationDeploymentDescriptionType)appDepDesc;
-        if(appDepDesc.isSetInputDataDirectory() && isInputDataDirectoryLocal(appDepDesc)){
-            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
-            FileSystem hdfs = FileSystem.get(hadoopConf);
-            hdfs.copyFromLocalFile(new Path(appDepDesc.getInputDataDirectory()),
-                    new Path(hadoopAppDesc.getHadoopJobConfiguration().getHdfsInputDirectory()));
-        }
-    }
-
-    private boolean isInputDataDirectoryLocal(ApplicationDeploymentDescriptionType appDepDesc){
-        String inputDataDirectoryPath = appDepDesc.getInputDataDirectory();
-        File inputDataDirectory = new File(inputDataDirectoryPath);
-        if(inputDataDirectory.exists() && !FileUtils.listFiles(inputDataDirectory, (String[]) null, false).isEmpty()){
-            return true;
-        }
-
-        return false;
-    }
-
-    private void handleOutPath(JobExecutionContext jobExecutionContext){}
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HadoopDeploymentHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HadoopDeploymentHandler.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HadoopDeploymentHandler.java
deleted file mode 100644
index 1d49a84..0000000
--- a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HadoopDeploymentHandler.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.hadoop.handler;
-
-import com.google.common.io.Files;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.GFacHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.schemas.gfac.HadoopHostType;
-import org.apache.commons.configuration.CompositeConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.io.FileUtils;
-import org.apache.whirr.Cluster;
-import org.apache.whirr.ClusterController;
-import org.apache.whirr.ClusterControllerFactory;
-import org.apache.whirr.ClusterSpec;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.*;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.EnumSet;
-import java.util.Map;
-import java.util.Properties;
-
-import static org.apache.whirr.ClusterSpec.Property.*;
-
-/**
- * This handler takes care of deploying Hadoop in the cloud (for cloud-bursting scenarios) and
- * in a local cluster. If an existing Hadoop cluster is available, it skips cluster setup and
- * just uses the Hadoop configuration provided by the user.
- */
-public class HadoopDeploymentHandler implements GFacHandler {
-    private static final Logger logger = LoggerFactory.getLogger("hadoop-dep-handler");
-
-    /**
-     * Once invoked, this method deploys Hadoop in a local cluster or in the cloud, based on the
-     * configuration provided. If there is an already deployed Hadoop cluster, deployment is skipped.
-     *
-     * @param jobExecutionContext job execution context containing all the required configurations
-     *                            and runtime information.
-     * @throws org.apache.airavata.gfac.core.handler.GFacHandlerException
-     */
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        if(jobExecutionContext.isInPath()){
-            handleInPath(jobExecutionContext);
-        } else {
-            handleOutPath(jobExecutionContext);
-        }
-    }
-
-    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        HostDescription hostDescription =
-                jobExecutionContext.getApplicationContext().getHostDescription();
-        if (!isHadoopDeploymentAvailable(hostDescription)) {
-            // Temp directory to keep generated configuration files.
-            File tempDirectory = Files.createTempDir();
-            try {
-                File hadoopSiteXML = launchHadoopCluster(hostDescription, tempDirectory);
-                jobExecutionContext.getInMessageContext().addParameter("HADOOP_SITE_XML", hadoopSiteXML.getAbsolutePath());
-                jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE", "WHIRR");
-                // TODO: Add hadoop-site.xml to job execution context.
-            } catch (IOException e) {
-                throw new GFacHandlerException("IO Error while processing configurations.",e);
-            } catch (ConfigurationException e) {
-                throw new GFacHandlerException("Whirr configuration error.", e);
-            } catch (InterruptedException e) {
-                throw new GFacHandlerException("Hadoop cluster launch interrupted.", e);
-            } catch (TransformerException e) {
-                throw new GFacHandlerException("Error while creating hadoop-site.xml", e);
-            } catch (ParserConfigurationException e) {
-                throw new GFacHandlerException("Error while creating hadoop-site.xml", e);
-            }
-        } else {
-            jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE",
-                    "MANUAL");
-            jobExecutionContext.getInMessageContext().addParameter("HADOOP_CONFIG_DIR",
-                    ((HadoopHostType)hostDescription.getType()).getHadoopConfigurationDirectory());
-            logger.info("Hadoop configuration is available. Skipping hadoop deployment.");
-            if(logger.isDebugEnabled()){
-                logger.debug("Hadoop configuration directory: " +
-                        getHadoopConfigDirectory(hostDescription));
-            }
-        }
-    }
-
-    private void handleOutPath(JobExecutionContext jobExecutionContext){
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(((String)inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE")).equals("WHIRR")){
-            // TODO: Shutdown hadoop cluster.
-            logger.info("Shutdown hadoop cluster.");
-        }
-    }
-
-    private File launchHadoopCluster(HostDescription hostDescription, File workingDirectory)
-            throws IOException, GFacHandlerException, ConfigurationException, InterruptedException, TransformerException, ParserConfigurationException {
-        ClusterSpec hadoopClusterSpec =
-                whirrConfigurationToClusterSpec(hostDescription, workingDirectory);
-        ClusterController hadoopClusterController =
-                createClusterController(hadoopClusterSpec.getServiceName());
-        Cluster hadoopCluster =  hadoopClusterController.launchCluster(hadoopClusterSpec);
-
-        logger.info("Started cluster of {} instances.", hadoopCluster.getInstances().size());
-
-        File siteXML = new File(workingDirectory, "hadoop-site.xml");
-        clusterPropertiesToHadoopSiteXml(hadoopCluster.getConfiguration(), siteXML);
-
-        return siteXML;
-    }
-
-    private ClusterController createClusterController(String serviceName){
-        ClusterControllerFactory factory = new ClusterControllerFactory();
-        ClusterController controller = factory.create(serviceName);
-
-        if(controller == null){
-            logger.warn("Unable to find the service {}, using default.", serviceName);
-            controller = factory.create(null);
-        }
-
-        return controller;
-    }
-
-    private ClusterSpec whirrConfigurationToClusterSpec(HostDescription hostDescription,
-                                                        File workingDirectory) throws IOException, GFacHandlerException, ConfigurationException {
-        File whirrConfig = getWhirrConfigurationFile(hostDescription, workingDirectory);
-        CompositeConfiguration compositeConfiguration = new CompositeConfiguration();
-        Configuration configuration = new PropertiesConfiguration(whirrConfig);
-        compositeConfiguration.addConfiguration(configuration);
-
-        ClusterSpec hadoopClusterSpec = new ClusterSpec(compositeConfiguration);
-
-        for (ClusterSpec.Property required : EnumSet.of(CLUSTER_NAME, PROVIDER, IDENTITY, CREDENTIAL,
-                INSTANCE_TEMPLATES, PRIVATE_KEY_FILE)) {
-            if (hadoopClusterSpec.getConfiguration().getString(required.getConfigName()) == null) {
-                throw new IllegalArgumentException(String.format("Option '%s' not set.",
-                        required.getSimpleName()));
-            }
-        }
-
-        return hadoopClusterSpec;
-    }
-
-    private File getWhirrConfigurationFile(HostDescription hostDescription, File workingDirectory)
-            throws GFacHandlerException, IOException {
-        HadoopHostType hadoopHostDesc = (HadoopHostType)hostDescription.getType();
-        if(hadoopHostDesc.isSetWhirrConfiguration()){
-            HadoopHostType.WhirrConfiguration whirrConfig = hadoopHostDesc.getWhirrConfiguration();
-            if(whirrConfig.isSetConfigurationFile()){
-                File whirrConfigFile = new File(whirrConfig.getConfigurationFile());
-                if(!whirrConfigFile.exists()){
-                    throw new GFacHandlerException(
-                            "Specified whirr configuration file doesn't exists.");
-                }
-
-                FileUtils.copyFileToDirectory(whirrConfigFile, workingDirectory);
-
-                return new File(workingDirectory, whirrConfigFile.getName());
-            } else if(whirrConfig.isSetConfiguration()){
-                Properties whirrConfigProps =
-                        whirrConfigurationsToProperties(whirrConfig.getConfiguration());
-                File whirrConfigFile = new File(workingDirectory, "whirr-hadoop.config");
-                whirrConfigProps.store(
-                        new FileOutputStream(whirrConfigFile), null);
-
-                return whirrConfigFile;
-            }
-        }
-
-        throw new GFacHandlerException("Cannot find Whirr configurations. Whirr configuration "
-                + "is required if you don't have already running Hadoop deployment.");
-    }
-
-    private Properties whirrConfigurationsToProperties(
-            HadoopHostType.WhirrConfiguration.Configuration configuration){
-        Properties whirrConfigProps = new Properties();
-
-        for(HadoopHostType.WhirrConfiguration.Configuration.Property property:
-                configuration.getPropertyArray()) {
-            whirrConfigProps.put(property.getName(), property.getValue());
-        }
-
-        return whirrConfigProps;
-    }
-
-    private void clusterPropertiesToHadoopSiteXml(Properties props, File hadoopSiteXml) throws ParserConfigurationException, TransformerException {
-        DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
-        DocumentBuilder documentBuilder = domFactory.newDocumentBuilder();
-
-        Document hadoopSiteXmlDoc = documentBuilder.newDocument();
-
-        hadoopSiteXmlDoc.setXmlVersion("1.0");
-        hadoopSiteXmlDoc.setXmlStandalone(true);
-        hadoopSiteXmlDoc.appendChild(hadoopSiteXmlDoc.createProcessingInstruction(
-                "xml-stylesheet", "type=\"text/xsl\" href=\"configuration.xsl\""));
-
-        Element configEle = hadoopSiteXmlDoc.createElement("configuration");
-
-        hadoopSiteXmlDoc.appendChild(configEle);
-
-        for(Map.Entry<Object, Object> entry : props.entrySet()){
-            addPropertyToConfiguration(entry, configEle, hadoopSiteXmlDoc);
-        }
-
-        saveDomToFile(hadoopSiteXmlDoc, hadoopSiteXml);
-    }
-
-    private void saveDomToFile(Document dom, File destFile) throws TransformerException {
-        Source source = new DOMSource(dom);
-
-        Result result = new StreamResult(destFile);
-
-        Transformer transformer = TransformerFactory.newInstance().newTransformer();
-        transformer.transform(source, result);
-    }
-
-    private void addPropertyToConfiguration(Map.Entry<Object, Object> entry, Element configElement, Document doc){
-        Element property = doc.createElement("property");
-        configElement.appendChild(property);
-
-        Element nameEle = doc.createElement("name");
-        nameEle.setTextContent(entry.getKey().toString());
-        property.appendChild(nameEle);
-
-        Element valueEle = doc.createElement("value");
-        valueEle.setTextContent(entry.getValue().toString());
-        property.appendChild(valueEle);
-    }
-
-    private boolean isHadoopDeploymentAvailable(HostDescription hostDescription) {
-        return ((HadoopHostType) hostDescription.getType()).isSetHadoopConfigurationDirectory();
-    }
-
-    private String getHadoopConfigDirectory(HostDescription hostDescription){
-        return ((HadoopHostType)hostDescription.getType()).getHadoopConfigurationDirectory();
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
\ No newline at end of file

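whirrConfigurationToClusterSpec() above rejects any Whirr configuration that is missing the six
required ClusterSpec properties. A sketch of writing a minimal whirr-hadoop.config that would
pass that check; the whirr.* key names follow the usual Whirr recipe conventions but are stated
here as assumptions, and every value is a placeholder:

    import java.io.FileOutputStream;
    import java.util.Properties;

    public final class WhirrConfigWriter {
        public static void main(String[] args) throws Exception {
            Properties p = new Properties();
            p.setProperty("whirr.cluster-name", "airavata-hadoop");          // CLUSTER_NAME
            p.setProperty("whirr.provider", "aws-ec2");                      // PROVIDER
            p.setProperty("whirr.identity", "YOUR_ACCESS_KEY_ID");           // IDENTITY
            p.setProperty("whirr.credential", "YOUR_SECRET_ACCESS_KEY");     // CREDENTIAL
            p.setProperty("whirr.instance-templates",                        // INSTANCE_TEMPLATES
                    "1 hadoop-namenode+hadoop-jobtracker,3 hadoop-datanode+hadoop-tasktracker");
            p.setProperty("whirr.private-key-file",                          // PRIVATE_KEY_FILE
                    System.getProperty("user.home") + "/.ssh/id_rsa");
            try (FileOutputStream out = new FileOutputStream("whirr-hadoop.config")) {
                p.store(out, "minimal Whirr Hadoop recipe (sketch)");
            }
        }
    }
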
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/impl/HadoopProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/impl/HadoopProvider.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/impl/HadoopProvider.java
deleted file mode 100644
index 30a1bf9..0000000
--- a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/impl/HadoopProvider.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.hadoop.provider.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.ArrayList;
-import java.util.Map;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.hadoop.provider.utils.HadoopUtils;
-import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Executes a Hadoop job using the cluster configuration provided by the in-flow handlers.
- */
-public class HadoopProvider extends AbstractProvider {
-    private static final Logger logger = LoggerFactory.getLogger(HadoopProvider.class);
-
-    private boolean isWhirrBasedDeployment = false;
-    private File hadoopConfigDir;
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE").equals("WHIRR")){
-            isWhirrBasedDeployment = true;
-        } else {
-            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
-            File hadoopConfigDir = new File(hadoopConfigDirPath);
-            if (!hadoopConfigDir.exists()){
-                throw new GFacProviderException("Specified hadoop configuration directory doesn't exist.");
-            } else if (FileUtils.listFiles(hadoopConfigDir, (String[]) null, false).isEmpty()){
-                throw new GFacProviderException("Cannot find any hadoop configuration files inside specified directory.");
-            }
-
-            this.hadoopConfigDir = hadoopConfigDir;
-        }
-    }
-
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
-                (HadoopApplicationDeploymentDescriptionType)jobExecutionContext
-                        .getApplicationContext().getApplicationDeploymentDescription().getType();
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        HadoopApplicationDeploymentDescriptionType.HadoopJobConfiguration jobConf = hadoopAppDesc.getHadoopJobConfiguration();
-
-        try{
-            // Preparing Hadoop configuration
-            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(
-                    jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
-
-            // Load jar containing map-reduce job implementation
-            ArrayList<URL> mapRedJars = new ArrayList<URL>();
-            mapRedJars.add(new File(jobConf.getJarLocation()).toURI().toURL());
-            URLClassLoader childClassLoader = new URLClassLoader(mapRedJars.toArray(new URL[mapRedJars.size()]),
-                    this.getClass().getClassLoader());
-
-            Job job = new Job(hadoopConf);
-
-            job.setJobName(jobConf.getJobName());
-
-            job.setOutputKeyClass(Class.forName(jobConf.getOutputKeyClass(), true, childClassLoader));
-            job.setOutputValueClass(Class.forName(jobConf.getOutputValueClass(), true, childClassLoader));
-
-            job.setMapperClass((Class<? extends Mapper>)Class.forName(jobConf.getMapperClass(), true, childClassLoader));
-            job.setCombinerClass((Class<? extends Reducer>) Class.forName(jobConf.getCombinerClass(), true, childClassLoader));
-            job.setReducerClass((Class<? extends Reducer>) Class.forName(jobConf.getReducerClass(), true, childClassLoader));
-
-            job.setInputFormatClass((Class<? extends InputFormat>)Class.forName(jobConf.getInputFormatClass(), true, childClassLoader));
-            job.setOutputFormatClass((Class<? extends OutputFormat>) Class.forName(jobConf.getOutputFormatClass(), true, childClassLoader));
-
-            FileInputFormat.setInputPaths(job, new Path(hadoopAppDesc.getInputDataDirectory()));
-            FileOutputFormat.setOutputPath(job, new Path(hadoopAppDesc.getOutputDataDirectory()));
-
-            job.waitForCompletion(true);
-            System.out.println(job.getTrackingURL());
-            if(jobExecutionContext.getOutMessageContext() == null){
-                jobExecutionContext.setOutMessageContext(new MessageContext());
-            }
-
-            OutputParameterType[] outputParametersArray = jobExecutionContext.getApplicationContext().
-                    getServiceDescription().getType().getOutputParametersArray();
-            for(OutputParameterType outparamType : outputParametersArray){
-                String paramName = outparamType.getParameterName();
-                if(paramName.equals("test-hadoop")){
-                    ActualParameter outParam = new ActualParameter();
-                    outParam.getType().changeType(StringParameterType.type);
-                    ((StringParameterType) outParam.getType()).setValue(job.getTrackingURL());
-                    jobExecutionContext.getOutMessageContext().addParameter("test-hadoop", outParam);
-                }
-            }
-        } catch (Exception e) {
-            String errMessage = "Error occurred during Map-Reduce job execution.";
-            logger.error(errMessage, e);
-            throw new GFacProviderException(errMessage, e);
-        }
-    }
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        // TODO: How to handle cluster shutdown. Best way is to introduce inPath/outPath to handler.
-    }
-
-    @Override
-    public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacException {
-        throw new UnsupportedOperationException("Job cancellation is not implemented by HadoopProvider.");
-    }
-
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-
-    }
-}

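HadoopProvider.execute() above wires the job together reflectively: it loads the user's
map-reduce jar through a child URLClassLoader and resolves every class by name. A
self-contained sketch of that loading pattern; mapred-job.jar and org.example.MyMapper are
hypothetical:

    import java.io.File;
    import java.net.URL;
    import java.net.URLClassLoader;

    public final class ChildLoaderDemo {
        public static void main(String[] args) throws Exception {
            // toURI().toURL() is the non-deprecated form of File.toURL().
            URL jar = new File("mapred-job.jar").toURI().toURL();
            URLClassLoader childClassLoader =
                    new URLClassLoader(new URL[]{jar}, ChildLoaderDemo.class.getClassLoader());
            // initialize=true mirrors the Class.forName(name, true, loader) calls in the provider.
            Class<?> mapperClass = Class.forName("org.example.MyMapper", true, childClassLoader);
            System.out.println("Loaded " + mapperClass.getName());
        }
    }
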
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/utils/HadoopUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/utils/HadoopUtils.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/utils/HadoopUtils.java
deleted file mode 100644
index 9d46446..0000000
--- a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/utils/HadoopUtils.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.hadoop.provider.utils;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.util.Collection;
-
-public class HadoopUtils {
-    public static Configuration createHadoopConfiguration(
-            JobExecutionContext jobExecutionContext,
-            boolean isWhirrBasedDeployment,
-            File hadoopConfigDir) throws FileNotFoundException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        Configuration hadoopConf = new Configuration();
-
-        if(isWhirrBasedDeployment){
-            hadoopConf.addResource(new FileInputStream(
-                    new File((String)inMessageContext.getParameter("HADOOP_SITE_XML"))));
-        } else {
-            readHadoopClusterConfigurationFromDirectory(hadoopConfigDir, hadoopConf);
-        }
-
-        return hadoopConf;
-    }
-
-    private static void readHadoopClusterConfigurationFromDirectory(File localHadoopConfigurationDirectory, Configuration hadoopConf)
-            throws FileNotFoundException {
-        Collection hadoopConfigurationFiles =
-                FileUtils.listFiles(localHadoopConfigurationDirectory, null, false);
-        for (Object f : hadoopConfigurationFiles) {
-            hadoopConf.addResource(new FileInputStream((File)f));
-        }
-    }
-}

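HadoopUtils.createHadoopConfiguration() above layers every file found in a local Hadoop
configuration directory onto a fresh Configuration. A short usage sketch of the same
addResource() mechanism for the manual-deployment branch; the /etc/hadoop/conf paths are
placeholders:

    import java.io.File;
    import java.io.FileInputStream;
    import org.apache.hadoop.conf.Configuration;

    public final class HadoopConfDemo {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Each added resource overlays its properties onto the configuration.
            conf.addResource(new FileInputStream(new File("/etc/hadoop/conf/core-site.xml")));
            conf.addResource(new FileInputStream(new File("/etc/hadoop/conf/hdfs-site.xml")));
            // Hadoop 1.x property naming the default filesystem URI.
            System.out.println(conf.get("fs.default.name"));
        }
    }
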
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/resources/errors.properties b/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
deleted file mode 100644
index 88c41b8..0000000
--- a/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
+++ /dev/null
@@ -1,197 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# Directly copied from jglobus. Not a good way to manage error properties.
-1 = Parameter not supported
-2 = The RSL length is greater than the maximum allowed
-3 = No resources available
-4 = Bad directory specified
-5 = The executable does not exist
-6 = Insufficient funds
-7 = Authentication with the remote server failed
-8 = Job cancelled by user
-9 = Job cancelled by system
-
-10 = Data transfer to the server failed
-11 = The stdin file does not exist
-12 = The connection to the server failed (check host and port)
-13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
-14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
-15 = The job manager received an invalid RSL
-16 = Could not connect to job manager
-17 = The job failed when the job manager attempted to run it
-18 = Paradyn error
-19 = The provided RSL 'jobtype' value is invalid
-
-20 = The provided RSL 'myjob' value is invalid
-21 = The job manager failed to locate an internal script argument file
-22 = The job manager failed to create an internal script argument file
-23 = The job manager detected an invalid job state
-24 = The job manager detected an invalid script response
-25 = The job manager detected an invalid job state
-26 = The provided RSL 'jobtype' value is not supported by this job manager
-27 = Unimplemented
-28 = The job manager failed to create an internal script submission file
-29 = The job manager cannot find the user proxy
-
-30 = The job manager failed to open the user proxy
-31 = The job manager failed to cancel the job as requested
-32 = System memory allocation failed
-33 = The interprocess job communication initialization failed
-34 = The interprocess job communication setup failed
-35 = The provided RSL 'host count' value is invalid
-36 = One of the provided RSL parameters is unsupported
-37 = The provided RSL 'queue' parameter is invalid
-38 = The provided RSL 'project' parameter is invalid
-39 = The provided RSL string includes variables that could not be identified
-
-40 = The provided RSL 'environment' parameter is invalid
-41 = The provided RSL 'dryrun' parameter is invalid
-42 = The provided RSL is invalid (an empty string)
-43 = The job manager failed to stage the executable
-44 = The job manager failed to stage the stdin file
-45 = The requested job manager type is invalid
-46 = The provided RSL 'arguments' parameter is invalid
-47 = The gatekeeper failed to run the job manager
-48 = The provided RSL could not be properly parsed
-49 = There is a version mismatch between GRAM components
-
-50 = The provided RSL 'arguments' parameter is invalid
-51 = The provided RSL 'count' parameter is invalid
-52 = The provided RSL 'directory' parameter is invalid
-53 = The provided RSL 'dryrun' parameter is invalid
-54 = The provided RSL 'environment' parameter is invalid
-55 = The provided RSL 'executable' parameter is invalid
-56 = The provided RSL 'host_count' parameter is invalid
-57 = The provided RSL 'jobtype' parameter is invalid
-58 = The provided RSL 'maxtime' parameter is invalid
-59 = The provided RSL 'myjob' parameter is invalid
-
-60 = The provided RSL 'paradyn' parameter is invalid
-61 = The provided RSL 'project' parameter is invalid
-62 = The provided RSL 'queue' parameter is invalid
-63 = The provided RSL 'stderr' parameter is invalid
-64 = The provided RSL 'stdin' parameter is invalid
-65 = The provided RSL 'stdout' parameter is invalid
-66 = The job manager failed to locate an internal script
-67 = The job manager failed on the system call pipe()
-68 = The job manager failed on the system call fcntl()
-69 = The job manager failed to create the temporary stdout filename
-
-70 = The job manager failed to create the temporary stderr filename
-71 = The job manager failed on the system call fork()
-72 = The executable file permissions do not allow execution
-73 = The job manager failed to open stdout
-74 = The job manager failed to open stderr
-75 = The cache file could not be opened in order to relocate the user proxy
-76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
-77 = The job manager failed to insert the contact in the client contact list
-78 = The contact was not found in the job manager's client contact list
-79 = Connecting to the job manager failed.  Possible reasons: job terminated, invalid job contact, network problems, ...
-
-80 = The syntax of the job contact is invalid
-81 = The executable parameter in the RSL is undefined
-82 = The job manager service is misconfigured.  condor arch undefined
-83 = The job manager service is misconfigured.  condor os undefined
-84 = The provided RSL 'min_memory' parameter is invalid
-85 = The provided RSL 'max_memory' parameter is invalid
-86 = The RSL 'min_memory' value is not zero or greater
-87 = The RSL 'max_memory' value is not zero or greater
-88 = The creation of a HTTP message failed
-89 = Parsing incoming HTTP message failed
-
-90 = The packing of information into a HTTP message failed
-91 = An incoming HTTP message did not contain the expected information
-92 = The job manager does not support the service that the client requested
-93 = The gatekeeper failed to find the requested service
-94 = The jobmanager does not accept any new requests (shutting down)
-95 = The client failed to close the listener associated with the callback URL
-96 = The gatekeeper contact cannot be parsed
-97 = The job manager could not find the 'poe' command
-98 = The job manager could not find the 'mpirun' command
-99 = The provided RSL 'start_time' parameter is invalid"
-100 = The provided RSL 'reservation_handle' parameter is invalid
-
-101 = The provided RSL 'max_wall_time' parameter is invalid
-102 = The RSL 'max_wall_time' value is not zero or greater
-103 = The provided RSL 'max_cpu_time' parameter is invalid
-104 = The RSL 'max_cpu_time' value is not zero or greater
-105 = The job manager is misconfigured, a scheduler script is missing
-106 = The job manager is misconfigured, a scheduler script has invalid permissions
-107 = The job manager failed to signal the job
-108 = The job manager did not recognize/support the signal type
-109 = The job manager failed to get the job id from the local scheduler
-
-110 = The job manager is waiting for a commit signal
-111 = The job manager timed out while waiting for a commit signal
-112 = The provided RSL 'save_state' parameter is invalid
-113 = The provided RSL 'restart' parameter is invalid
-114 = The provided RSL 'two_phase' parameter is invalid
-115 = The RSL 'two_phase' value is not zero or greater
-116 = The provided RSL 'stdout_position' parameter is invalid
-117 = The RSL 'stdout_position' value is not zero or greater
-118 = The provided RSL 'stderr_position' parameter is invalid
-119 = The RSL 'stderr_position' value is not zero or greater
-
-120 = The job manager restart attempt failed
-121 = The job state file doesn't exist
-122 = Could not read the job state file
-123 = Could not write the job state file
-124 = The old job manager is still alive
-125 = The job manager state file TTL expired
-126 = It is unknown if the job was submitted
-127 = The provided RSL 'remote_io_url' parameter is invalid
-128 = Could not write the remote io url file
-129 = The standard output/error size is different
-
-130 = The job manager was sent a stop signal (job is still running)
-131 = The user proxy expired (job is still running)
-132 = The job was not submitted by original jobmanager
-133 = The job manager is not waiting for that commit signal
-134 = The provided RSL scheduler specific parameter is invalid
-135 = The job manager could not stage in a file
-136 = The scratch directory could not be created
-137 = The provided 'gass_cache' parameter is invalid
-138 = The RSL contains attributes which are not valid for job submission
-139 = The RSL contains attributes which are not valid for stdio update
-
-140 = The RSL contains attributes which are not valid for job restart
-141 = The provided RSL 'file_stage_in' parameter is invalid
-142 = The provided RSL 'file_stage_in_shared' parameter is invalid
-143 = The provided RSL 'file_stage_out' parameter is invalid
-144 = The provided RSL 'gass_cache' parameter is invalid
-145 = The provided RSL 'file_cleanup' parameter is invalid
-146 = The provided RSL 'scratch_dir' parameter is invalid
-147 = The provided scheduler-specific RSL parameter is invalid
-148 = A required RSL attribute was not defined in the RSL spec
-149 = The gass_cache attribute points to an invalid cache directory
-
-150 = The provided RSL 'save_state' parameter has an invalid value
-151 = The job manager could not open the RSL attribute validation file
-152 = The job manager could not read the RSL attribute validation file
-153 = The provided RSL 'proxy_timeout' is invalid
-154 = The RSL 'proxy_timeout' value is not greater than zero
-155 = The job manager could not stage out a file
-156 = The job contact string does not match any which the job manager is handling
-157 = Proxy delegation failed
-158 = The job manager could not lock the state lock file
-
-1000 = Failed to start up callback handler
-1003 = Job contact not set
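
For reference, a minimal sketch of how an error table like the one above is typically consumed; it mirrors the getGramErrorString() logic of the GramProvider removed later in this commit, and the class name, resource name and fallback message here are illustrative:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public class GramErrorTable {
        private final Properties errors = new Properties();

        public GramErrorTable(String resourceName) throws IOException {
            try (InputStream in = GramErrorTable.class.getClassLoader()
                    .getResourceAsStream(resourceName)) {
                if (in == null) {
                    throw new IOException(resourceName + " not found on the classpath");
                }
                errors.load(in);
            }
        }

        // Returns the human-readable description, or a generic fallback.
        public String describe(int errorCode) {
            return errors.getProperty(String.valueOf(errorCode), "Error code: " + errorCode);
        }
    }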

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/resources/service.properties b/modules/gfac/gfac-hadoop/src/main/resources/service.properties
deleted file mode 100644
index 391bfea..0000000
--- a/modules/gfac/gfac-hadoop/src/main/resources/service.properties
+++ /dev/null
@@ -1,58 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-
-#
-# Class which implemented Scheduler interface. It will be used to determine a Provider
-#
-scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
-
-#
-# Data Service Plugins classes
-#
-datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
-
-#
-# Pre execution Plugins classes. For example, GridFTP Input Staging
-#
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging 
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
-
-#
-# Post execution Plugins classes. For example, GridFTP Output Staging
-#
-postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
-postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
-
-#
-# SSH private key location. It will be used by SSHProvider
-#
-# ssh.key=/home/user/.ssh/id_rsa
-# ssh.keypass=
-# ssh.username=usernameAtHost
-
-#
-# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
-#
-# myproxy.server=myproxy.teragrid.org
-# myproxy.user=username
-# myproxy.pass=password
-# myproxy.life=3600
\ No newline at end of file
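
Note that java.util.Properties keeps only the last occurrence of a repeated key, so the duplicated prechain.classes and postchain.classes entries above would each resolve to their second value when loaded; a comma-separated list is the usual way to chain several plugins under one key. A hypothetical sketch of loading such a chain reflectively (class and key names are illustrative, and each plugin is assumed to have a public no-arg constructor):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Properties;

    public class PluginChainLoader {
        public static List<Object> loadChain(String resource, String key)
                throws IOException, ReflectiveOperationException {
            Properties props = new Properties();
            try (InputStream in = PluginChainLoader.class.getClassLoader()
                    .getResourceAsStream(resource)) {
                if (in == null) {
                    throw new IOException(resource + " not found on the classpath");
                }
                props.load(in);
            }
            List<Object> chain = new ArrayList<>();
            // e.g. prechain.classes=org.example.Plugin1,org.example.Plugin2
            for (String name : props.getProperty(key, "").split(",")) {
                if (!name.trim().isEmpty()) {
                    chain.add(Class.forName(name.trim()).getDeclaredConstructor().newInstance());
                }
            }
            return chain;
        }
    }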

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt b/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
deleted file mode 100644
index e749e9c..0000000
--- a/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
+++ /dev/null
@@ -1,73 +0,0 @@
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
-	distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
-	the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
-	obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
-	in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-	ANY ~ KIND, either express or implied. See the License for the specific language governing permissions and limitations under
-	the License. -->
-<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
-<xsl:output method="text" />
-<xsl:template match="/ns:JobDescriptor">
-#! /bin/sh
-# PBS batch job script built by Globus job manager
-#   <xsl:choose>
-    <xsl:when test="ns:shellName">
-##PBS -S <xsl:value-of select="ns:shellName"/>
-    </xsl:when></xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:queueName">
-#PBS -q <xsl:value-of select="ns:queueName"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:mailOptions">
-#PBS -m <xsl:value-of select="ns:mailOptions"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-<xsl:when test="ns:acountString">
-#PBS -A <xsl:value-of select="ns:acountString"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:maxWallTime">
-#PBS -l walltime=<xsl:value-of select="ns:maxWallTime"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:standardOutFile">
-#PBS -o <xsl:value-of select="ns:standardOutFile"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:standardErrorFile">
-#PBS -e <xsl:value-of select="ns:standardErrorFile"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
-#PBS -l nodes=<xsl:value-of select="ns:nodes"/>:ppn=<xsl:value-of select="ns:processesPerNode"/>
-<xsl:text>&#xa;</xsl:text>
-    </xsl:when>
-    </xsl:choose>
-<xsl:for-each select="ns:exports/ns:name">
-<xsl:value-of select="."/>=<xsl:value-of select="./@value"/><xsl:text>&#xa;</xsl:text>
-export<xsl:text>   </xsl:text><xsl:value-of select="."/>
-<xsl:text>&#xa;</xsl:text>
-</xsl:for-each>
-<xsl:for-each select="ns:preJobCommands/ns:command">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-    </xsl:for-each>
-cd <xsl:text>   </xsl:text><xsl:value-of select="ns:workingDirectory"/><xsl:text>&#xa;</xsl:text>
-    <xsl:choose><xsl:when test="ns:jobSubmitterCommand">
-<xsl:value-of select="ns:jobSubmitterCommand"/><xsl:text>   </xsl:text></xsl:when></xsl:choose><xsl:value-of select="ns:executablePath"/><xsl:text>   </xsl:text>
-<xsl:for-each select="ns:inputs/ns:input">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-    </xsl:for-each>
-<xsl:for-each select="ns:postJobCommands/ns:command">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-</xsl:for-each>
-
-</xsl:template>
-
-</xsl:stylesheet>
\ No newline at end of file
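
For context, the stylesheet above turns a JobDescriptor XML document into a plain-text PBS batch script (its xsl:output declares method="text"). A minimal JAXP sketch of applying it, with placeholder file names:

    import java.io.File;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.stream.StreamResult;
    import javax.xml.transform.stream.StreamSource;

    public class PbsScriptGenerator {
        public static void main(String[] args) throws Exception {
            Transformer t = TransformerFactory.newInstance()
                    .newTransformer(new StreamSource(new File("PBSTemplate.xslt")));
            // method="text" in the stylesheet means the result is a shell script, not XML
            t.transform(new StreamSource(new File("jobDescriptor.xml")),
                    new StreamResult(new File("job.pbs")));
        }
    }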

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/test/resources/logging.properties b/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
deleted file mode 100644
index 0584d38..0000000
--- a/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#default/fallback log4j configuration
-#
-
-# Set root logger level to INFO with appenders A1 and A2.
-log4j.rootLogger=INFO, A1, A2
-
-# A1 is set to be a rolling file appender with default params
-log4j.appender.A1=org.apache.log4j.RollingFileAppender
-log4j.appender.A1.File=target/seclogs.txt
-
-# A1 uses PatternLayout.
-log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
-
-# A2 is a console appender
-log4j.appender.A2=org.apache.log4j.ConsoleAppender
-
-# A2 uses PatternLayout.
-log4j.appender.A2.layout=org.apache.log4j.PatternLayout
-log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
-
-log4j.logger.unicore.security=INFO
-
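
For reference, with the configuration above any logger at INFO or higher writes to both appenders: the rolling file target/seclogs.txt (A1) and the console (A2). A minimal log4j 1.x usage sketch (class name illustrative):

    import org.apache.log4j.Logger;

    public class LoggingExample {
        private static final Logger log = Logger.getLogger(LoggingExample.class);

        public static void main(String[] args) {
            log.info("written to target/seclogs.txt and the console");
            log.debug("dropped: below the INFO root level");
        }
    }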

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/pom.xml b/modules/gfac/pom.xml
index 9454f37..7f8737f 100644
--- a/modules/gfac/pom.xml
+++ b/modules/gfac/pom.xml
@@ -32,11 +32,8 @@
             </activation>
             <modules>
                 <module>gfac-core</module>
-                <module>gfac-ec2</module>
                 <module>gfac-ssh</module>
                 <module>gfac-local</module>
-                <!--<module>gfac-hadoop</module>-->
-                <!--<module>gfac-gram</module>-->
                 <module>gfac-gsissh</module>
                 <module>gfac-bes</module>
                 <module>gfac-monitor</module>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/workflow-model/workflow-engine/pom.xml
----------------------------------------------------------------------
diff --git a/modules/workflow-model/workflow-engine/pom.xml b/modules/workflow-model/workflow-engine/pom.xml
index 7de0b8d..66445ff 100644
--- a/modules/workflow-model/workflow-engine/pom.xml
+++ b/modules/workflow-model/workflow-engine/pom.xml
@@ -267,11 +267,11 @@
             <artifactId>airavata-gfac-core</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
+<!--        <dependency>
             <groupId>org.apache.airavata</groupId>
             <artifactId>airavata-gfac-ec2</artifactId>
             <version>${project.version}</version>
-        </dependency>
+        </dependency>-->
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/xbaya-gui/pom.xml
----------------------------------------------------------------------
diff --git a/modules/xbaya-gui/pom.xml b/modules/xbaya-gui/pom.xml
index 9589084..1a39990 100644
--- a/modules/xbaya-gui/pom.xml
+++ b/modules/xbaya-gui/pom.xml
@@ -232,11 +232,11 @@
             <artifactId>airavata-gfac-core</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
+<!--        <dependency>
             <groupId>org.apache.airavata</groupId>
             <artifactId>airavata-gfac-ec2</artifactId>
             <version>${project.version}</version>
-        </dependency>
+        </dependency>-->
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java
index aab86a4..33012c5 100644
--- a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java
+++ b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java
@@ -31,7 +31,6 @@ import java.util.Observable;
 import java.util.Observer;
 
 import org.apache.airavata.api.Airavata.Client;
-import org.apache.airavata.gfac.ec2.AmazonSecurityContext;
 import org.apache.airavata.model.error.AiravataClientConnectException;
 import org.apache.airavata.schemas.wec.ContextHeaderDocument;
 import org.apache.airavata.workflow.model.component.registry.JCRComponentRegistry;
@@ -141,7 +140,6 @@ public class XBayaConfiguration extends Observable implements Observer {
 
     private boolean regURLSetByCMD = false;
 
-    private AmazonSecurityContext amazonSecurityContext = null;
 
     private ContextHeaderDocument.ContextHeader contextHeader;
 
@@ -330,22 +328,6 @@ public class XBayaConfiguration extends Observable implements Observer {
     }
 
     /**
-     * Get the AmazonSecurityContext needed for cloud job submission.
-     * @return AmazonSecurityContext
-     */
-    public AmazonSecurityContext getAmazonSecurityContext() {
-        return amazonSecurityContext;
-    }
-
-    /**
-     * Set the AmazonSecurityContext needed for cloud job submission.
-     * @param amazonSecurityContext amazon security context.
-     */
-    public void setAmazonSecurityContext(AmazonSecurityContext amazonSecurityContext) {
-        this.amazonSecurityContext = amazonSecurityContext;
-    }
-
-    /**
      * @return The DSC URL
      */
     public URI getDSCURL() {

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java
----------------------------------------------------------------------
diff --git a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java
index d67caad..53659fa 100644
--- a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java
+++ b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java
@@ -21,11 +21,6 @@
 
 package org.apache.airavata.xbaya.ui.dialogs.amazon;
 
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import org.apache.airavata.gfac.ec2.EC2Provider;
-import org.apache.airavata.gfac.ec2.util.EC2ProviderUtil;
 import org.apache.airavata.xbaya.XBayaEngine;
 import org.apache.airavata.xbaya.core.amazon.AmazonCredential;
 import org.apache.airavata.xbaya.ui.dialogs.XBayaDialog;
@@ -33,12 +28,12 @@ import org.apache.airavata.xbaya.ui.widgets.GridPanel;
 import org.apache.airavata.xbaya.ui.widgets.XBayaLabel;
 import org.apache.airavata.xbaya.ui.widgets.XBayaTextField;
 
-import javax.swing.*;
+import javax.swing.AbstractAction;
+import javax.swing.JButton;
+import javax.swing.JDialog;
+import javax.swing.JOptionPane;
+import javax.swing.JPanel;
 import java.awt.event.ActionEvent;
-import java.io.File;
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
 
 public class ChangeCredentialWindow {
     private XBayaEngine engine;
@@ -109,7 +104,7 @@ public class ChangeCredentialWindow {
 
             @Override
             public void actionPerformed(ActionEvent e) {
-                String accessID = ChangeCredentialWindow.this.accessKeyIDTextField.getText();
+               /* String accessID = ChangeCredentialWindow.this.accessKeyIDTextField.getText();
                 if (!"".equals(accessID)) {
                     String secretID = ChangeCredentialWindow.this.secretAccessKeyTextField.getText();
 
@@ -152,7 +147,7 @@ public class ChangeCredentialWindow {
                         hide();
                         return;
                     }
-                }
+                }*/
 
                 JOptionPane.showMessageDialog(dialog.getDialog(),"SecretKey and AccessKey can not be empty!");
             }


[2/4] airavata git commit: removed gfac-ec2, gfac-gram and gfac-hadoop modules from source.

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/provider/impl/GramProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/provider/impl/GramProvider.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/provider/impl/GramProvider.java
deleted file mode 100644
index 2f22e0d..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/provider/impl/GramProvider.java
+++ /dev/null
@@ -1,539 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.provider.impl;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.util.Map;
-import java.util.MissingResourceException;
-import java.util.Properties;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.JobSubmissionFault;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.notification.events.JobIDEvent;
-import org.apache.airavata.gfac.core.notification.events.StartExecutionEvent;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.gfac.gram.security.GSISecurityContext;
-import org.apache.airavata.gfac.gram.util.GramJobSubmissionListener;
-import org.apache.airavata.gfac.gram.util.GramProviderUtils;
-import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.JobState;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.globus.gram.GramException;
-import org.globus.gram.GramJob;
-import org.globus.gram.WaitingForCommitException;
-import org.globus.gram.internal.GRAMConstants;
-import org.globus.gram.internal.GRAMProtocolErrorConstants;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GramProvider extends AbstractProvider {
-    private static final Logger log = LoggerFactory.getLogger(GramJobSubmissionListener.class);
-
-    private GramJob job;
-    private GramJobSubmissionListener listener;
-    private boolean twoPhase = true;
-
-    /**
-     * If normal job submission fails due to an authorisation or script failure, we
-     * re-attempt to submit the job. In order to avoid a recursive loop during a
-     * continuous failure, the following booleans track whether each failure path
-     * has already been tried.
-     */
-    /**
-     * To track job submissions during an authorisation failure while requesting the job.
-     */
-    private boolean renewCredentialsAttempt = false;
-    /**
-     * To track job submission during a script error situation.
-     */
-    private boolean reSubmissionInProgress = false;
-    /**
-     * To track authorisation failures during status monitoring.
-     */
-    private boolean authorisationFailedAttempt = false;
-
-    private static final Map<String, GramJob> currentlyExecutingJobCache
-            = new ConcurrentHashMap<String, GramJob>();
-
-    private static Properties resources;
-
-    static {
-        try {
-
-            String propFileName = "errors.properties";
-            resources = new Properties();
-            InputStream inputStream = GramProvider.class.getClassLoader()
-                    .getResourceAsStream(propFileName);
-
-            if (inputStream == null) {
-                throw new FileNotFoundException("property file '" + propFileName
-                        + "' not found in the classpath");
-            }
-
-            resources.load(inputStream);
-
-        } catch (FileNotFoundException mre) {
-            log.error("errors.properties not found", mre);
-        } catch (IOException e) {
-            log.error("Error reading errors.properties file", e);
-        }
-    }
-
-
-    // This method prepares the environment before the application invocation.
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-
-        try {
-        	super.initialize(jobExecutionContext);
-            String strTwoPhase = ServerSettings.getSetting("TwoPhase");
-            if (strTwoPhase != null) {
-                twoPhase = Boolean.parseBoolean(strTwoPhase);
-                log.info("Two phase commit is set to " + twoPhase);
-            }
-        } catch (ApplicationSettingsException e) {
-            log.warn("Error reading TwoPhase property from configurations.", e);
-        }
-
-        job = GramProviderUtils.setupEnvironment(jobExecutionContext, twoPhase);
-        listener = new GramJobSubmissionListener(job, jobExecutionContext);
-        job.addListener(listener);
-    }
-
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException{
-        jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-        GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().
-                getApplicationDeploymentDescription().getType();
-
-        StringBuilder stringBuilder = new StringBuilder();
-        try {
-
-            GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
-                    getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-            job.setCredentials(gssCred);
-            // We do not support multiple gatekeepers in XBaya GUI, so we simply pick the 0th element in the array
-            String gateKeeper = host.getGlobusGateKeeperEndPointArray(0);
-            log.info("Request to contact:" + gateKeeper);
-
-            stringBuilder.append("Finished launching job, Host = ").append(host.getHostAddress()).append(" RSL = ")
-                    .append(job.getRSL()).append(" working directory = ").append(app.getStaticWorkingDirectory())
-                    .append(" temp directory = ").append(app.getScratchWorkingDirectory())
-                    .append(" Globus GateKeeper Endpoint = ").append(gateKeeper);
-
-            log.info(stringBuilder.toString());
-
-            submitJobs(gateKeeper, jobExecutionContext, host);
-
-        } catch (ApplicationSettingsException e) {
-        	throw new GFacException(e.getMessage(), e);
-		} finally {
-            if (job != null) {
-                try {
-                	 /*
-                     * Remove listener
-                     */
-                    job.removeListener(listener);
-                } catch (Exception e) {
-                	 log.error(e.getMessage());
-                }
-            }
-        }
-    }
-
-    private void submitJobs(String gateKeeper,
-                            JobExecutionContext jobExecutionContext,
-                            GlobusHostType globusHostType) throws GFacException, GFacProviderException {
-    	boolean applicationSaved=false;
-    	String taskID = jobExecutionContext.getTaskData().getTaskID();
-			
-    	if (twoPhase) {
-            try {
-                /*
-                * The first boolean is to force communication through SSLv3
-                * The second boolean is to specify the job is a batch job - use true for interactive and false for
-                 * batch.
-                * The third boolean is to specify to use the full proxy and not delegate a limited proxy.
-                */
-//                job.request(true, gateKeeper, false, false);
-                
-            	
-            	// first boolean -> to run job as batch
-            	// second boolean -> to use limited proxy
-            	//TODO: need review?
-                job.request(gateKeeper, false, false);
-                
-                // Single boolean to track all authentication failures, therefore we need to re-initialize
-                // this here
-                renewCredentialsAttempt = false;
-
-            } catch (WaitingForCommitException e) {
-            	String jobID = job.getIDAsString();
-				
-            	details.setJobID(jobID);
-            	details.setJobDescription(job.getRSL());
-                jobExecutionContext.setJobDetails(details);
-                GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.UN_SUBMITTED);
-                
-                applicationSaved=true;
-                String jobStatusMessage = "Un-submitted JobID= " + jobID;
-                log.info(jobStatusMessage);
-                jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
-
-                log.info("Two phase commit: sending COMMIT_REQUEST signal; Job id - " + jobID);
-
-                try {
-                    job.signal(GramJob.SIGNAL_COMMIT_REQUEST);
-
-                } catch (GramException gramException) {
-                    throw new GFacException("Error while sending commit request. Job Id - "
-                            + job.getIDAsString(), gramException);
-                } catch (GSSException gssException) {
-
-                    // User credentials are invalid
-                    log.error("Error while submitting commit request - Credentials provided are invalid. Job Id - "
-                            + job.getIDAsString(), e);
-                    log.info("Attempting to renew credentials and re-submit commit signal...");
-                	GFacUtils.saveErrorDetails(jobExecutionContext, gssException.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                    renewCredentials(jobExecutionContext);
-
-                    try {
-                        job.signal(GramJob.SIGNAL_COMMIT_REQUEST);
-                    } catch (GramException e1) {
-                     	GFacUtils.saveErrorDetails(jobExecutionContext, gssException.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                    	throw new GFacException("Error while sending commit request. Job Id - "
-                                + job.getIDAsString(), e1);
-                    } catch (GSSException e1) {
-                     	GFacUtils.saveErrorDetails(jobExecutionContext, gssException.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                        throw new GFacException("Error while sending commit request. Job Id - "
-                                + job.getIDAsString() + ". Credentials provided invalid", e1);
-                    }
-                }
-                GFacUtils.updateJobStatus(jobExecutionContext, details, JobState.SUBMITTED);
-                jobStatusMessage = "Submitted JobID= " + job.getIDAsString();
-                log.info(jobStatusMessage);
-                jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
-
-            } catch (GSSException e) {
-                // Renew credentials and re-submit
-             	GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                
-                reSubmitJob(gateKeeper, jobExecutionContext, globusHostType, e);
-
-            } catch (GramException e) {
-             	GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                
-            	throw new GFacException("An error occurred while submitting a job, job id = " + job.getIDAsString(), e);
-            }
-        } else {
-
-            /*
-            * The first boolean is to force communication through SSLv3
-            * The second boolean is to specify the job is a batch job - use true for interactive and false for
-             * batch.
-            * The third boolean is to specify to use the full proxy and not delegate a limited proxy.
-            */
-            try {
-                  
-//                job.request(true, gateKeeper, false, false);
-                
-            	// first boolean -> to run job as batch
-            	// second boolean -> to use limited proxy
-            	//TODO: need review?
-                job.request(gateKeeper, false, false);
-
-                renewCredentialsAttempt = false;
-
-            } catch (GramException e) {
-            	GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                throw new GFacException("An error occurred while submitting a job, job id = " + job.getIDAsString(), e);
-            } catch (GSSException e) {
-            	GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
-                // Renew credentials and re-submit
-                reSubmitJob(gateKeeper, jobExecutionContext, globusHostType, e);
-            }
-
-            String jobStatusMessage = "Un-submitted JobID= " + job.getIDAsString();
-            log.info(jobStatusMessage);
-            jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
-
-        }
-
-        currentlyExecutingJobCache.put(job.getIDAsString(), job);
-        /*
-        * Wait until job is done
-        */
-        listener.waitFor();
-
-        checkJobStatus(jobExecutionContext, globusHostType, gateKeeper);
-
-    }
-
-    private void renewCredentials(JobExecutionContext jobExecutionContext) throws GFacException {
-
-        renewCredentials(this.job, jobExecutionContext);
-    }
-
-    private void renewCredentials(GramJob gramJob, JobExecutionContext jobExecutionContext) throws GFacException {
-
-        try {
-        	GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
-                    getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).renewCredentials();
-            gramJob.renew(gssCred);
-        } catch (GramException e1) {
-            throw new GFacException("Unable to renew credentials. Job Id - "
-                    + gramJob.getIDAsString(), e1);
-        } catch (GSSException e1) {
-            throw new GFacException("Unable to renew credentials. Job Id - "
-                    + gramJob.getIDAsString(), e1);
-        } catch (ApplicationSettingsException e) {
-        	throw new GFacException(e.getLocalizedMessage(), e);
-		}
-    }
-
-    private void reSubmitJob(String gateKeeper,
-                             JobExecutionContext jobExecutionContext,
-                             GlobusHostType globusHostType, Exception e) throws GFacException, GFacProviderException {
-
-        if (!renewCredentialsAttempt) {
-
-            renewCredentialsAttempt = true;
-
-            // User credentials are invalid
-            log.error("Error while submitting job - Credentials provided are invalid. Job Id - "
-                    + job.getIDAsString(), e);
-            log.info("Attempting to renew credentials and re-submit jobs...");
-
-            // Remove existing listener and register a new listener
-            job.removeListener(listener);
-            listener = new GramJobSubmissionListener(job, jobExecutionContext);
-
-            job.addListener(listener);
-
-            renewCredentials(jobExecutionContext);
-
-            submitJobs(gateKeeper, jobExecutionContext, globusHostType);
-
-        } else {
-            throw new GFacException("Error while submitting job - Credentials provided are invalid. Job Id - "
-                    + job.getIDAsString(), e);
-        }
-
-    }
-
-    private void reSubmitJob(String gateKeeper,
-                             JobExecutionContext jobExecutionContext,
-                             GlobusHostType globusHostType) throws GFacException, GFacProviderException {
-
-        // User credentials are invalid
-        log.info("Attempting to renew credentials and re-submit jobs...");
-
-        // Remove existing listener and register a new listener
-        job.removeListener(listener);
-        listener = new GramJobSubmissionListener(job, jobExecutionContext);
-
-        job.addListener(listener);
-
-        renewCredentials(jobExecutionContext);
-
-        submitJobs(gateKeeper, jobExecutionContext, globusHostType);
-
-    }
-
-	
-	
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-    }
-
-    public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacException {
-        cancelSingleJob(jobExecutionContext.getJobDetails().getJobID(), jobExecutionContext);
-    }
-
-
-    private void cancelSingleJob(String jobId, JobExecutionContext context) throws GFacException {
-        // First check whether job id is in the cache
-        if (currentlyExecutingJobCache.containsKey(jobId)) {
-
-            synchronized (this) {
-                GramJob gramJob = currentlyExecutingJobCache.get(jobId);
-
-                // Even though we check using containsKey, at this point job could be null
-                if (gramJob != null && (gramJob.getStatus() != GRAMConstants.STATUS_DONE &&
-                        gramJob.getStatus() != GRAMConstants.STATUS_FAILED)) {
-                    cancelJob(gramJob, context);
-                }
-            }
-
-        } else {
-
-            try {
-				GSSCredential gssCred = ((GSISecurityContext)context.
-				        getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-
-				GramJob gramJob = new GramJob(null);
-				try {
-				    gramJob.setID(jobId);
-				} catch (MalformedURLException e) {
-				    throw new GFacException("Invalid job id - " + jobId, e);
-				}
-				gramJob.setCredentials(gssCred);
-
-				synchronized (this) {
-				    if (gramJob.getStatus() != GRAMConstants.STATUS_DONE &&
-				            gramJob.getStatus() != GRAMConstants.STATUS_FAILED) {
-				        cancelJob(gramJob, context);
-				    }
-				}
-			} catch (ApplicationSettingsException e) {
-				throw new GFacException(e);
-			}
-        }
-    }
-
-    private void cancelJob(GramJob gramJob, JobExecutionContext context) throws GFacException{
-
-        try {
-            gramJob.cancel();
-        } catch (GramException e) {
-            throw new GFacException("Error cancelling job, id - " + gramJob.getIDAsString(), e);
-        } catch (GSSException e) {
-
-            log.warn("Credentials invalid to cancel job. Attempting to renew credentials and re-try. " +
-                    "Job id - " + gramJob.getIDAsString());
-            renewCredentials(gramJob, context);
-
-            try {
-                gramJob.cancel();
-                gramJob.signal(GramJob.SIGNAL_COMMIT_END);
-            } catch (GramException e1) {
-                throw new GFacException("Error cancelling job, id - " + gramJob.getIDAsString(), e1);
-            } catch (GSSException e1) {
-                throw new GFacException("Error cancelling job, invalid credentials. Job id - "
-                        + gramJob.getIDAsString(), e);
-            }
-        }
-
-    }
-
-    public void initProperties(Map<String, String> properties) throws GFacException {
-
-    }
-
-    private void checkJobStatus(JobExecutionContext jobExecutionContext, GlobusHostType host, String gateKeeper)
-            throws GFacProviderException {
-        int jobStatus = listener.getCurrentStatus();
-
-        if (jobStatus == GramJob.STATUS_FAILED) {
-            
-            String errorMsg = "Job " + job.getIDAsString() + " on host " + host.getHostAddress() + " Job Exit Code = "
-                    + listener.getError() + " Error Description = " + getGramErrorString(listener.getError());
-
-            if (listener.getError() == GRAMProtocolErrorConstants.INVALID_SCRIPT_REPLY) {
-
-                // re-submitting without renewing
-                // TODO verify why we re-submit jobs when we get an invalid script reply
-                if (!reSubmissionInProgress) {
-                    reSubmissionInProgress = true;
-
-                    log.info("Invalid script reply received. Re-submitting job, id - " + job.getIDAsString());
-                    try {
-                        reSubmitJob(gateKeeper, jobExecutionContext, host);
-                    } catch (GFacException e) {
-                        throw new GFacProviderException
-                                ("Error during re-submission. Original job submission data - " + errorMsg,  e);
-                    }
-                    return;
-                }
-
-            } else if (listener.getError() == GRAMProtocolErrorConstants.ERROR_AUTHORIZATION) {
-
-                // re-submit with renewed credentials
-                if (!authorisationFailedAttempt) {
-                    authorisationFailedAttempt = true;
-                    log.info("Authorisation error contacting provider. Re-submitting job with renewed credentials.");
-
-                    try {
-                        renewCredentials(jobExecutionContext);
-                        reSubmitJob(gateKeeper, jobExecutionContext, host);
-                    } catch (GFacException e) {
-                        throw new GFacProviderException
-                                ("Error during re-submission. Original job submission data - " + errorMsg,  e);
-                    }
-
-                    return;
-                }
-
-            } else if (listener.getError() == GRAMProtocolErrorConstants.USER_CANCELLED) {
-
-                log.info("User successfully cancelled job id " + job.getIDAsString());
-                return;
-            }
-
-
-
-            log.error(errorMsg);
-
-            synchronized (this) {
-                currentlyExecutingJobCache.remove(job.getIDAsString());
-            }
-
-            throw new JobSubmissionFault(new Exception(errorMsg), host.getHostAddress(), gateKeeper,
-                            job.getRSL(), jobExecutionContext, getGramErrorString(listener.getError()),
-                    listener.getError());
-
-        } else if (jobStatus == GramJob.STATUS_DONE) {
-            log.info("Job " + job.getIDAsString() + " on host " + host.getHostAddress() + " is successfully executed.");
-
-            synchronized (this) {
-                currentlyExecutingJobCache.remove(job.getIDAsString());
-            }
-        }
-    }
-
-    public String getGramErrorString(int errorCode) {
-
-        if (resources != null) {
-            try {
-                return resources.getProperty(String.valueOf(errorCode));
-            } catch (MissingResourceException mre) {
-                log.warn("Error reading globus error descriptions.", mre);
-                return "Error code: " + errorCode;
-            }
-        } else {
-            return "Error code: " + errorCode;
-        }
-
-    }
-
-}
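
Condensed to its happy path, the two-phase flow implemented above is: request() throws WaitingForCommitException once the job is parked, the job is recorded as UN_SUBMITTED, and a COMMIT_REQUEST signal completes the submission. A reduced sketch of just that flow (credential, gatekeeper contact and persistence are placeholders; all of the retry and error handling in the real provider is omitted):

    import org.globus.gram.GramJob;
    import org.globus.gram.WaitingForCommitException;
    import org.ietf.jgss.GSSCredential;

    public class TwoPhaseSubmitSketch {
        public static String submit(GramJob job, GSSCredential cred, String gateKeeper)
                throws Exception {
            job.setCredentials(cred);
            try {
                // batch = false, limited delegation = false, as in the provider above
                job.request(gateKeeper, false, false);
            } catch (WaitingForCommitException e) {
                String jobId = job.getIDAsString();
                // ... persist jobId with state UN_SUBMITTED here ...
                job.signal(GramJob.SIGNAL_COMMIT_REQUEST); // phase two: actually submit
                // ... update the persisted state to SUBMITTED here ...
                return jobId;
            }
            return job.getIDAsString();
        }
    }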

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/security/GSISecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/security/GSISecurityContext.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/security/GSISecurityContext.java
deleted file mode 100644
index 395b797..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/security/GSISecurityContext.java
+++ /dev/null
@@ -1,275 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.security;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.credential.store.credential.Credential;
-import org.apache.airavata.credential.store.credential.impl.certificate.CertificateCredential;
-import org.apache.airavata.credential.store.store.CredentialReader;
-import org.apache.airavata.gfac.AbstractSecurityContext;
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.RequestData;
-import org.globus.gsi.X509Credential;
-import org.globus.gsi.gssapi.GlobusGSSCredentialImpl;
-import org.globus.gsi.provider.GlobusProvider;
-import org.globus.myproxy.GetParams;
-import org.globus.myproxy.MyProxy;
-import org.globus.myproxy.MyProxyException;
-import org.gridforum.jgss.ExtendedGSSCredential;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.security.Security;
-import java.security.cert.X509Certificate;
-
-/**
- * Handles GRID related security.
- */
-public class GSISecurityContext extends AbstractSecurityContext {
-
-    protected static final Logger log = LoggerFactory.getLogger(GSISecurityContext.class);
-    /*
-     * context name
-     */
-    public static final String GSI_SECURITY_CONTEXT = "gsi";
-
-    public static int CREDENTIAL_RENEWING_THRESH_HOLD = 10 * 90;
-
-    private GSSCredential gssCredentials = null;
-
-    // Set trusted cert path and add provider
-    static {
-        Security.addProvider(new GlobusProvider());
-        try {
-			setUpTrustedCertificatePath();
-		} catch (ApplicationSettingsException e) {
-			log.error(e.getLocalizedMessage(), e);
-		}
-    }
-
-    public static void setUpTrustedCertificatePath(String trustedCertificatePath) {
-
-        File file = new File(trustedCertificatePath);
-
-        if (!file.exists() || !file.canRead()) {
-            File f = new File(".");
-            log.info("Current directory " + f.getAbsolutePath());
-            throw new RuntimeException("Cannot read trusted certificate path " + trustedCertificatePath);
-        } else {
-            System.setProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY, file.getAbsolutePath());
-        }
-    }
-
-    private static void setUpTrustedCertificatePath() throws ApplicationSettingsException {
-
-        String trustedCertificatePath  = ServerSettings.getSetting(Constants.TRUSTED_CERT_LOCATION);
-
-        setUpTrustedCertificatePath(trustedCertificatePath);
-    }
-
-    /**
-     * Gets the trusted certificate path. Trusted certificate path is stored in "X509_CERT_DIR"
-     * system property.
-     * @return The trusted certificate path as a string.
-     */
-    public static String getTrustedCertificatePath() {
-        return System.getProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY);
-    }
-
-
-    public GSISecurityContext(CredentialReader credentialReader, RequestData requestData) {
-        super(credentialReader, requestData);
-    }
-
-
-
-    /**
-     * Gets GSSCredentials. The process is as follows:
-     * If credentials are queried for the first time, create credentials.
-     *   1. Try creating credentials using certificates stored in the credential store
-     *   2. If 1 fails use user name and password to create credentials
-     * If credentials are already created check the remaining life time of the credential. If
-     * remaining life time is less than CREDENTIAL_RENEWING_THRESH_HOLD, then renew credentials.
-     * @return GSSCredentials to be used.
-     * @throws org.apache.airavata.gfac.GFacException If an error occurred while creating credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential getGssCredentials() throws GFacException, ApplicationSettingsException {
-
-        if (gssCredentials == null) {
-
-            try {
-                gssCredentials = getCredentialsFromStore();
-            } catch (Exception e) {
-                log.error("An exception occurred while retrieving credentials from the credential store. " +
-                        "Will continue with my proxy user name and password.", e);
-            }
-
-            // If store does not have credentials try to get from user name and password
-            if (gssCredentials == null) {
-                gssCredentials = getDefaultCredentials();
-            }
-
-            // if still null, throw an exception
-            if (gssCredentials == null) {
-                throw new GFacException("Unable to retrieve my proxy credentials to continue operation.");
-            }
-        } else {
-            try {
-                if (gssCredentials.getRemainingLifetime() < CREDENTIAL_RENEWING_THRESH_HOLD) {
-                    return renewCredentials();
-                }
-            } catch (GSSException e) {
-                throw new GFacException("Unable to retrieve remaining life time from credentials.", e);
-            }
-        }
-
-        return gssCredentials;
-    }
-
-    /**
-     * Renews credentials. First tries to renew credentials as a trusted renewer. If that
-     * fails, uses the user name and password to renew credentials.
-     * @return Renewed credentials.
-     * @throws org.apache.airavata.gfac.GFacException If an error occurred while renewing credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential renewCredentials() throws GFacException, ApplicationSettingsException {
-
-        // First try to renew credentials as a trusted renewer
-        try {
-            gssCredentials = renewCredentialsAsATrustedHost();
-        } catch (Exception e) {
-            log.warn("Renewing credentials as a trusted renewer failed", e);
-            gssCredentials = getProxyCredentials();
-        }
-
-        return gssCredentials;
-    }
-
-    /**
-     * Reads the credentials from credential store.
-     * @return If token is found in the credential store, will return a valid credential. Else returns null.
-     * @throws Exception If an error occurred while retrieving credentials.
-     */
-    public GSSCredential getCredentialsFromStore() throws Exception {
-
-        if (getCredentialReader() == null) {
-            return null;
-        }
-
-        Credential credential = getCredentialReader().getCredential(getRequestData().getGatewayId(),
-                getRequestData().getTokenId());
-
-        if (credential != null) {
-            if (credential instanceof CertificateCredential) {
-
-                log.info("Successfully found credentials for token id - " + getRequestData().getTokenId() +
-                        " gateway id - " + getRequestData().getGatewayId());
-
-                CertificateCredential certificateCredential = (CertificateCredential) credential;
-
-                X509Certificate[] certificates = certificateCredential.getCertificates();
-                X509Credential newCredential = new X509Credential(certificateCredential.getPrivateKey(), certificates);
-
-                GlobusGSSCredentialImpl cred = new GlobusGSSCredentialImpl(newCredential, GSSCredential.INITIATE_AND_ACCEPT);
-                System.out.print(cred.export(ExtendedGSSCredential.IMPEXP_OPAQUE));
-                return cred;
-                //return new GlobusGSSCredentialImpl(newCredential,
-                //        GSSCredential.INITIATE_AND_ACCEPT);
-            } else {
-                log.info("Credential type is not CertificateCredential. Cannot create mapping globus credentials. " +
-                        "Credential type - " + credential.getClass().getName());
-            }
-        } else {
-            log.info("Could not find credentials for token - " + getRequestData().getTokenId() + " and "
-                    + "gateway id - " + getRequestData().getGatewayId());
-        }
-
-        return null;
-    }
-
-    /**
-     * Gets the default proxy certificate.
-     * @return Default my proxy credentials.
-     * @throws org.apache.airavata.gfac.GFacException If an error occurred while retrieving credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential getDefaultCredentials() throws GFacException, ApplicationSettingsException{
-        MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
-        try {
-            return myproxy.get(getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
-                    getRequestData().getMyProxyLifeTime());
-        } catch (MyProxyException e) {
-            throw new GFacException("An error occurred while retrieving default security credentials.", e);
-        }
-    }
-
-    /**
-     * Gets a new proxy certificate given current credentials.
-     * @return The short lived GSSCredentials
-     * @throws org.apache.airavata.gfac.GFacException If an error occurred while retrieving credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential getProxyCredentials() throws GFacException, ApplicationSettingsException {
-
-        MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
-        try {
-            return myproxy.get(gssCredentials, getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
-                    getRequestData().getMyProxyLifeTime());
-        } catch (MyProxyException e) {
-            throw new GFacException("An error occurred while renewing security credentials using user/password.", e);
-        }
-    }
-
-    /**
-     * Renews GSSCredentials.
-     * Before executing, the current host must be added as a trusted renewer; note that renewing
-     * credentials this way does not require a user name and password.
-     * To set that up, execute the following commands:
-     * > myproxy-logon -t <LIFETIME> -s <MY PROXY SERVER> -l <USER NAME>
-     * E.g.:- > myproxy-logon -t 264 -s myproxy.teragrid.org -l us3
-     *          Enter MyProxy pass phrase:
-     *          A credential has been received for user us3 in /tmp/x509up_u501.
-     * > myproxy-init -A --cert /tmp/x509up_u501 --key /tmp/x509up_u501 -l ogce -s myproxy.teragrid.org
-     * @return  Renewed credentials.
-     * @throws org.apache.airavata.gfac.GFacException If an error occurred while renewing credentials.
-     * @throws org.apache.airavata.common.exception.ApplicationSettingsException
-     */
-    public GSSCredential renewCredentialsAsATrustedHost() throws GFacException, ApplicationSettingsException {
-        MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
-        GetParams getParams = new GetParams();
-        getParams.setAuthzCreds(gssCredentials);
-        getParams.setUserName(getRequestData().getMyProxyUserName());
-        getParams.setLifetime(getRequestData().getMyProxyLifeTime());
-        try {
-            return myproxy.get(gssCredentials, getParams);
-        } catch (MyProxyException e) {
-            throw new GFacException("An error occurred while renewing security credentials.", e);
-        }
-    }
-
-}
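
For reference, the renewal logic deleted above follows a two-step fallback: first attempt a
trusted-renewer refresh (no password needed), then fall back to fetching a fresh short-lived
proxy with user name and password. A minimal standalone sketch of that pattern against the
jglobus MyProxy API; the server, port, account name, password, and lifetime values are
placeholders, not Airavata configuration:

    import org.globus.myproxy.GetParams;
    import org.globus.myproxy.MyProxy;
    import org.globus.myproxy.MyProxyException;
    import org.ietf.jgss.GSSCredential;

    public class MyProxyRenewalSketch {
        public static GSSCredential renew(GSSCredential current) throws MyProxyException {
            MyProxy myproxy = new MyProxy("myproxy.example.org", 7512); // placeholder server/port
            try {
                // Preferred path: renew as a trusted renewer, no password required.
                GetParams getParams = new GetParams();
                getParams.setAuthzCreds(current);
                getParams.setUserName("someUser");   // placeholder account
                getParams.setLifetime(3600);         // requested lifetime in seconds
                return myproxy.get(current, getParams);
            } catch (MyProxyException e) {
                // Fallback: retrieve a new short-lived proxy using user name and password.
                return myproxy.get(current, "someUser", "somePassword", 3600);
            }
        }
    }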

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramJobSubmissionListener.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramJobSubmissionListener.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramJobSubmissionListener.java
deleted file mode 100644
index 782eee2..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramJobSubmissionListener.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.util;
-
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.notification.events.StatusChangeEvent;
-import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.gfac.gram.security.GSISecurityContext;
-import org.globus.gram.GramJob;
-import org.globus.gram.GramJobListener;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GramJobSubmissionListener implements GramJobListener{
-    private final Logger log = LoggerFactory.getLogger(GramJobSubmissionListener.class);
-
-    public static final int NO_ERROR = -42;
-    public static final int INITIAL_STATUS = -43;
-
-    private volatile boolean jobDone = false;
-    private volatile int error = NO_ERROR;
-    private int currentStatus = INITIAL_STATUS;
-
-    private JobExecutionContext context;
-    private GramJob job;
-
-    public GramJobSubmissionListener(GramJob job, JobExecutionContext context) {
-        this.job = job;
-        this.context = context;
-    }
-
-    /**
-     * Blocks the calling thread until the job status becomes DONE or FAILED.
-     */
-    public void waitFor()  {
-        while (!isJobDone()) {
-            synchronized (this) {
-                try {
-                    wait();
-                } catch (InterruptedException e) {
-                    // Restore the interrupt flag and stop waiting instead of swallowing it.
-                    Thread.currentThread().interrupt();
-                    return;
-                }
-            }
-        }
-    }
-
-
-    
-    private synchronized boolean isJobDone() {
-        return this.jobDone;
-    }
-
-    private void setStatus(int status, int error) {
-		try {
-			GFacUtils.updateJobStatus(context, context.getJobDetails(), GramProviderUtils.getApplicationJobStatus(status));
-		} catch (GFacException e) {
-			log.error("Error persisting status" + e.getLocalizedMessage(), e);
-		}
-        this.currentStatus = status;
-        this.error = error;
-
-        switch (this.currentStatus) {
-        case GramJob.STATUS_FAILED:
-            log.info("Job Error Code: " + error);
-            this.jobDone = true;
-            notifyAll();
-            break; // without this break, FAILED fell through into the DONE case
-        case GramJob.STATUS_DONE:
-            this.jobDone = true;
-            notifyAll();
-            break;
-        }
-
-    }
-
-    public synchronized void statusChanged(GramJob job) {
-
-        int jobStatus = job.getStatus();
-        String jobStatusMessage = "Status of job " + job.getIDAsString() + " is " + job.getStatusAsString();
-        /*
-         * Notify currentStatus change
-         */
-        this.context.getNotifier().publish(new StatusChangeEvent(jobStatusMessage));
-
-        /*
-         * Set new currentStatus if it is jobDone, notify all wait object
-         */
-        if (currentStatus != jobStatus) {
-            currentStatus = jobStatus;
-
-            setStatus(job.getStatus(), job.getError());
-
-            // Test to see whether we need to renew credentials
-            renewCredentials(job);
-        }
-    }
-
-    private void renewCredentials(GramJob job) {
-
-        try {
-
-            int proxyExpTime = job.getCredentials().getRemainingLifetime();
-            if (proxyExpTime < GSISecurityContext.CREDENTIAL_RENEWING_THRESH_HOLD) {
-                log.info("Job proxy expired. Trying to renew proxy");
-                GSSCredential gssCred = ((GSISecurityContext)context.
-                        getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).renewCredentials();
-                job.renew(gssCred);
-                log.info("MyProxy credentials are renewed .");
-            }
-
-        } catch (Exception e) {
-            log.error("An error occurred while trying to renew credentials. Job id " + job.getIDAsString());
-        }
-
-
-    }
-
-    public synchronized int getError() {
-        return error;
-    }
-
-    public synchronized int getCurrentStatus() {
-        return currentStatus;
-    }
-}
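
The listener above hand-rolls a one-shot completion signal with wait()/notifyAll(). A sketch of
the same idea using java.util.concurrent, which sidesteps the missed-notification and switch
fall-through pitfalls; the class and method names here are illustrative, not Airavata API:

    import java.util.concurrent.CountDownLatch;

    public class JobCompletionLatch {
        private final CountDownLatch done = new CountDownLatch(1);
        private volatile int finalStatus;

        // Called from the status-change callback when the job reaches DONE or FAILED.
        public void signalDone(int status) {
            finalStatus = status;
            done.countDown();
        }

        // Blocks the caller until signalDone() fires, then reports the final status.
        public int awaitCompletion() throws InterruptedException {
            done.await();
            return finalStatus;
        }
    }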

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramProviderUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramProviderUtils.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramProviderUtils.java
deleted file mode 100644
index 8dfe78f..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramProviderUtils.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.util;
-
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.model.workspace.experiment.JobState;
-import org.globus.gram.GramAttributes;
-import org.globus.gram.GramJob;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-
-public class GramProviderUtils {
-    private static final Logger log = LoggerFactory.getLogger(GramProviderUtils.class);
-
-    public static GramJob setupEnvironment(JobExecutionContext jobExecutionContext, boolean enableTwoPhase) throws GFacProviderException {
-        log.debug("Searching for Gate Keeper");
-        try {
-            GramAttributes jobAttr = GramRSLGenerator.configureRemoteJob(jobExecutionContext);
-            String rsl = jobAttr.toRSL();
-
-            if (enableTwoPhase) {
-                rsl = rsl + "(twoPhase=yes)";
-            }
-
-            log.debug("RSL = " + rsl);
-            return new GramJob(rsl);
-        } catch (ToolsException te) {
-            throw new GFacProviderException(te.getMessage(), te);
-        }
-    }
-
-     public static JobState getApplicationJobStatus(int gramStatus) {
-        switch (gramStatus) {
-            case GramJob.STATUS_UNSUBMITTED:
-                return JobState.HELD;
-            case GramJob.STATUS_ACTIVE:
-                return JobState.ACTIVE;
-            case GramJob.STATUS_DONE:
-                return JobState.COMPLETE;
-            case GramJob.STATUS_FAILED:
-                return JobState.FAILED;
-            case GramJob.STATUS_PENDING:
-                return JobState.QUEUED;
-            case GramJob.STATUS_STAGE_IN:
-                return JobState.QUEUED;
-            case GramJob.STATUS_STAGE_OUT:
-                return JobState.COMPLETE;
-            case GramJob.STATUS_SUSPENDED:
-                return JobState.SUSPENDED;
-            default:
-                return JobState.UNKNOWN;
-        }
-    }
-
-    public static URI createGsiftpURI(String host, String localPath) throws URISyntaxException {
-        StringBuffer buf = new StringBuffer();
-        if (!host.startsWith("gsiftp://"))
-            buf.append("gsiftp://");
-        buf.append(host);
-        if (!host.endsWith("/"))
-            buf.append("/");
-        buf.append(localPath);
-        return new URI(buf.toString());
-    }
-
-    public static URI createGsiftpURI(GridFTPContactInfo host, String localPath) throws URISyntaxException {
-        StringBuffer buf = new StringBuffer();
-
-        if (!host.hostName.startsWith("gsiftp://"))
-            buf.append("gsiftp://");
-        // Append the host name and port exactly once; appending the contact info object itself
-        // would duplicate the port, since its toString() already renders "host:port".
-        buf.append(host.hostName).append(":").append(host.port);
-        if (!host.hostName.endsWith("/"))
-            buf.append("/");
-        buf.append(localPath);
-        return new URI(buf.toString());
-    }
-
-    public static String createGsiftpURIAsString(String host, String localPath) throws URISyntaxException {
-        StringBuffer buf = new StringBuffer();
-        if (!host.startsWith("gsiftp://"))
-            buf.append("gsiftp://");
-        buf.append(host);
-        if (!host.endsWith("/"))
-            buf.append("/");
-        buf.append(localPath);
-        return buf.toString();
-    }
-
-}
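
A throwaway usage sketch of the URI helper above; the method body mirrors the deleted
createGsiftpURI(String, String) for illustration, and the host and path values are invented:

    import java.net.URI;
    import java.net.URISyntaxException;

    public class GsiftpUriDemo {
        // Mirror of GramProviderUtils.createGsiftpURI(String, String), for illustration only.
        static URI createGsiftpURI(String host, String localPath) throws URISyntaxException {
            StringBuilder buf = new StringBuilder();
            if (!host.startsWith("gsiftp://"))
                buf.append("gsiftp://");
            buf.append(host);
            if (!host.endsWith("/"))
                buf.append("/");
            buf.append(localPath);
            return new URI(buf.toString());
        }

        public static void main(String[] args) throws URISyntaxException {
            // Prints gsiftp://gridftp.example.org:2811/scratch/job42/input.dat
            System.out.println(createGsiftpURI("gridftp.example.org:2811", "scratch/job42/input.dat"));
        }
    }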

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramRSLGenerator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramRSLGenerator.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramRSLGenerator.java
deleted file mode 100644
index 8ec4cc8..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GramRSLGenerator.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.util;
-
-import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.schemas.gfac.*;
-import org.globus.gram.GramAttributes;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-public class GramRSLGenerator {
-    protected static final Logger log = LoggerFactory.getLogger(GramRSLGenerator.class);
-
-    private enum JobType {
-        SERIAL, SINGLE, MPI, MULTIPLE, CONDOR
-    }
-
-
-    public static GramAttributes configureRemoteJob(JobExecutionContext context) throws ToolsException {
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) context.getApplicationContext().getApplicationDeploymentDescription().getType();
-        GramAttributes jobAttr = new GramAttributes();
-        jobAttr.setExecutable(app.getExecutableLocation());
-        jobAttr.setDirectory(app.getStaticWorkingDirectory());
-        jobAttr.setStdout(app.getStandardOutput());
-        jobAttr.setStderr(app.getStandardError());
-        /*
-         * The env here contains the env of the host and the application. i.e the env specified in the host description
-         * and application description documents
-         */
-        NameValuePairType[] env = app.getApplicationEnvironmentArray();
-        // Add each host/application environment variable to the job attributes.
-        for (NameValuePairType pair : env) {
-            jobAttr.addEnvVariable(pair.getName(), pair.getValue());
-        }
-        jobAttr.addEnvVariable(Constants.INPUT_DATA_DIR_VAR_NAME, app.getInputDataDirectory());
-        jobAttr.addEnvVariable(Constants.OUTPUT_DATA_DIR_VAR_NAME, app.getOutputDataDirectory());
-
-    
-
-        if (app.getStandardInput() != null && !"".equals(app.getStandardInput())) {
-            jobAttr.setStdin(app.getStandardInput());
-        } else {
-            MessageContext input = context.getInMessageContext();
-            Map<String,Object> inputs = input.getParameters();
-            Set<String> keys = inputs.keySet();
-            for (String paramName : keys) {
-                ActualParameter actualParameter = (ActualParameter) inputs.get(paramName);
-                if ("URIArray".equals(actualParameter.getType().getType().toString()) || "StringArray".equals(actualParameter.getType().getType().toString())
-                        || "FileArray".equals(actualParameter.getType().getType().toString())) {
-                    String[] values = null;
-                    if (actualParameter.getType() instanceof URIArrayType) {
-                        values = ((URIArrayType) actualParameter.getType()).getValueArray();
-                    } else if (actualParameter.getType() instanceof StringArrayType) {
-                        values = ((StringArrayType) actualParameter.getType()).getValueArray();
-                    } else if (actualParameter.getType() instanceof FileArrayType) {
-                        values = ((FileArrayType) actualParameter.getType()).getValueArray();
-                    }
-                    String value = StringUtil.createDelimiteredString(values, " ");
-                    jobAttr.addArgument(value);
-                } else {
-                    String paramValue = MappingFactory.toString(actualParameter);
-                    jobAttr.addArgument(paramValue);
-                }
-            }
-        }
-        // Use the WorkflowContext header values if the user provided them in the request,
-        // overwriting the default values from the Deployment Descriptor.
-        //todo finish the scheduling based on workflow execution context
-        TaskDetails taskData = context.getTaskData();
-        if (taskData != null && taskData.isSetTaskScheduling()) {
-            ComputationalResourceScheduling computationalResource = taskData.getTaskScheduling();
-            try {
-                int cpuCount = computationalResource.getTotalCPUCount();
-                if (cpuCount > 0) {
-                    app.setCpuCount(cpuCount);
-                }
-            } catch (NullPointerException e) {
-                log.debug("No value sent in WorkflowContextHeader for CPU Count; the value in the Deployment Descriptor will be used.");
-            }
-            try {
-                int nodeCount = computationalResource.getNodeCount();
-                if (nodeCount > 0) {
-                    app.setNodeCount(nodeCount);
-                }
-            } catch (NullPointerException e) {
-                log.debug("No value sent in WorkflowContextHeader for Node Count; the value in the Deployment Descriptor will be used.");
-            }
-            try {
-                String queueName = computationalResource.getQueueName();
-                if (queueName != null) {
-                    if (app.getQueue() == null) {
-                        QueueType queueType = app.addNewQueue();
-                        queueType.setQueueName(queueName);
-                    } else {
-                        app.getQueue().setQueueName(queueName);
-                    }
-                }
-            } catch (NullPointerException e) {
-                log.debug("No value sent in WorkflowContextHeader for Queue Name; the value in the Deployment Descriptor will be used.");
-            }
-            try {
-                int maxWallTime = computationalResource.getWallTimeLimit();
-                if (maxWallTime > 0) {
-                    app.setMaxWallTime(maxWallTime);
-                }
-            } catch (NullPointerException e) {
-                log.debug("No value sent in WorkflowContextHeader for Wall Time Limit; the value in the Deployment Descriptor will be used.");
-            }
-        }
-        if (app.getNodeCount() > 0) {
-            jobAttr.set("hostCount", String.valueOf(app.getNodeCount()));
-            log.debug("Setting number of Nodes to " + app.getCpuCount());
-        }
-        if (app.getCpuCount() > 0) {
-            log.debug("Setting number of procs to " + app.getCpuCount());
-            jobAttr.setNumProcs(app.getCpuCount());
-        }
-        if (app.getMinMemory() > 0) {
-            log.debug("Setting minimum memory to " + app.getMinMemory());
-            jobAttr.setMinMemory(app.getMinMemory());
-        }
-        if (app.getMaxMemory() > 0) {
-            log.debug("Setting maximum memory to " + app.getMaxMemory());
-            jobAttr.setMaxMemory(app.getMaxMemory());
-        }
-        if (app.getProjectAccount() != null) {
-            if (app.getProjectAccount().getProjectAccountNumber() != null) {
-                log.debug("Setting project to " + app.getProjectAccount().getProjectAccountNumber());
-                jobAttr.setProject(app.getProjectAccount().getProjectAccountNumber());
-            }
-        }
-        if (app.getQueue() != null) {
-            if (app.getQueue().getQueueName() != null) {
-                log.debug("Setting job queue to " + app.getQueue().getQueueName());
-                jobAttr.setQueue(app.getQueue().getQueueName());
-            }
-        }
-        if (app.getMaxWallTime() > 0) {
-            log.debug("Setting max wall clock time to " + app.getMaxWallTime());
-
-            jobAttr.setMaxWallTime(app.getMaxWallTime());
-            jobAttr.set("proxy_timeout", "1");
-        } else {
-            jobAttr.setMaxWallTime(30);
-        }
-        String jobType = JobType.SINGLE.toString();
-        if (app.getJobType() != null) {
-            jobType = app.getJobType().toString();
-        }
-        if (jobType.equalsIgnoreCase(JobType.SINGLE.toString())) {
-            log.debug("Setting job type to single");
-            jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE);
-        } else if (jobType.equalsIgnoreCase(JobType.SERIAL.toString())) {
-            // SERIAL jobs are also submitted with the GRAM 'single' job type.
-            log.debug("Setting job type to single");
-            jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE);
-        } else if (jobType.equalsIgnoreCase(JobType.MPI.toString())) {
-            log.debug("Setting job type to mpi");
-            jobAttr.setJobType(GramAttributes.JOBTYPE_MPI);
-        } else if (jobType.equalsIgnoreCase(JobType.MULTIPLE.toString())) {
-            log.debug("Setting job type to multiple");
-            jobAttr.setJobType(GramAttributes.JOBTYPE_MULTIPLE);
-        } else if (jobType.equalsIgnoreCase(JobType.CONDOR.toString())) {
-            jobAttr.setJobType(GramAttributes.JOBTYPE_CONDOR);
-        }
-
-        return jobAttr;
-    }
-}
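
To make the RSL generation above concrete, a minimal sketch that builds a GramAttributes by
hand and prints its RSL form; all values are invented, not taken from any real deployment:

    import org.globus.gram.GramAttributes;

    public class RslSketch {
        public static void main(String[] args) {
            GramAttributes attrs = new GramAttributes();
            attrs.setExecutable("/home/user/app/run.sh"); // illustrative paths and values
            attrs.setDirectory("/scratch/job42");
            attrs.setStdout("/scratch/job42/stdout.txt");
            attrs.setStderr("/scratch/job42/stderr.txt");
            attrs.setNumProcs(16);
            attrs.setMaxWallTime(30); // minutes
            attrs.setQueue("normal");
            attrs.setJobType(GramAttributes.JOBTYPE_MPI);
            // toRSL() renders the attribute set as a GRAM RSL string, along the lines of
            // &(executable=/home/user/app/run.sh)(directory=/scratch/job42)...
            System.out.println(attrs.toRSL());
        }
    }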

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GridFTPContactInfo.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GridFTPContactInfo.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GridFTPContactInfo.java
deleted file mode 100644
index f331b0e..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/util/GridFTPContactInfo.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.util;
-
-import org.apache.airavata.gfac.Constants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GridFTPContactInfo {
-    protected final static Logger log = LoggerFactory.getLogger(GridFTPContactInfo.class);
-    public String hostName;
-    public int port;
-
-    public GridFTPContactInfo(String hostName, int port) {
-        if (port <= 0 || port == 80) {
-            log.debug(hostName + "port recived " + port + " setting it to " + Constants.DEFAULT_GSI_FTP_PORT);
-            port = Constants.DEFAULT_GSI_FTP_PORT;
-        }
-        this.hostName = hostName;
-        this.port = port;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj instanceof GridFTPContactInfo) {
-            return hostName.equals(((GridFTPContactInfo) obj).hostName) && port == ((GridFTPContactInfo) obj).port;
-        } else {
-            return false;
-        }
-    }
-
-    @Override
-    public int hashCode() {
-        // Include the port so hashCode() distinguishes the same values equals() does.
-        return hostName.hashCode() * 31 + port;
-    }
-
-    @Override
-    public String toString() {
-        return hostName + ":" + port;
-    }
-}
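
A small sketch of why a value class like the one above should fold the port into hashCode()
once equals() compares it; the ContactInfo class here is a stand-in written for illustration:

    import java.util.HashSet;
    import java.util.Set;

    // Minimal stand-in for GridFTPContactInfo, for illustration only.
    class ContactInfo {
        final String hostName;
        final int port;
        ContactInfo(String hostName, int port) { this.hostName = hostName; this.port = port; }
        @Override public boolean equals(Object o) {
            return o instanceof ContactInfo
                    && hostName.equals(((ContactInfo) o).hostName)
                    && port == ((ContactInfo) o).port;
        }
        @Override public int hashCode() { return hostName.hashCode() * 31 + port; }
    }

    public class ContactInfoDemo {
        public static void main(String[] args) {
            Set<ContactInfo> endpoints = new HashSet<>();
            endpoints.add(new ContactInfo("gridftp.example.org", 2811));
            // Same host, different port: a distinct endpoint, kept separate in the set.
            endpoints.add(new ContactInfo("gridftp.example.org", 2812));
            System.out.println(endpoints.size()); // prints 2
        }
    }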

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/resources/errors.properties b/modules/gfac/gfac-gram/src/main/resources/errors.properties
deleted file mode 100644
index 88c41b8..0000000
--- a/modules/gfac/gfac-gram/src/main/resources/errors.properties
+++ /dev/null
@@ -1,197 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# Directly copied from jglobus. Not a good way to manage error properties.
-1 = Parameter not supported
-2 = The RSL length is greater than the maximum allowed
-3 = No resources available
-4 = Bad directory specified
-5 = The executable does not exist
-6 = Insufficient funds
-7 = Authentication with the remote server failed
-8 = Job cancelled by user
-9 = Job cancelled by system
-
-10 = Data transfer to the server failed
-11 = The stdin file does not exist
-12 = The connection to the server failed (check host and port)
-13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
-14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
-15 = The job manager received an invalid RSL
-16 = Could not connect to job manager
-17 = The job failed when the job manager attempted to run it
-18 = Paradyn error
-19 = The provided RSL 'jobtype' value is invalid
-
-20 = The provided RSL 'myjob' value is invalid
-21 = The job manager failed to locate an internal script argument file
-22 = The job manager failed to create an internal script argument file
-23 = The job manager detected an invalid job state
-24 = The job manager detected an invalid script response
-25 = The job manager detected an invalid job state
-26 = The provided RSL 'jobtype' value is not supported by this job manager
-27 = Unimplemented
-28 = The job manager failed to create an internal script submission file
-29 = The job manager cannot find the user proxy
-
-30 = The job manager failed to open the user proxy
-31 = The job manager failed to cancel the job as requested
-32 = System memory allocation failed
-33 = The interprocess job communication initialization failed
-34 = The interprocess job communication setup failed
-35 = The provided RSL 'host count' value is invalid
-36 = One of the provided RSL parameters is unsupported
-37 = The provided RSL 'queue' parameter is invalid
-38 = The provided RSL 'project' parameter is invalid
-39 = The provided RSL string includes variables that could not be identified
-
-40 = The provided RSL 'environment' parameter is invalid
-41 = The provided RSL 'dryrun' parameter is invalid
-42 = The provided RSL is invalid (an empty string)
-43 = The job manager failed to stage the executable
-44 = The job manager failed to stage the stdin file
-45 = The requested job manager type is invalid
-46 = The provided RSL 'arguments' parameter is invalid
-47 = The gatekeeper failed to run the job manager
-48 = The provided RSL could not be properly parsed
-49 = There is a version mismatch between GRAM components
-
-50 = The provided RSL 'arguments' parameter is invalid
-51 = The provided RSL 'count' parameter is invalid
-52 = The provided RSL 'directory' parameter is invalid
-53 = The provided RSL 'dryrun' parameter is invalid
-54 = The provided RSL 'environment' parameter is invalid
-55 = The provided RSL 'executable' parameter is invalid
-56 = The provided RSL 'host_count' parameter is invalid
-57 = The provided RSL 'jobtype' parameter is invalid
-58 = The provided RSL 'maxtime' parameter is invalid
-59 = The provided RSL 'myjob' parameter is invalid
-
-60 = The provided RSL 'paradyn' parameter is invalid
-61 = The provided RSL 'project' parameter is invalid
-62 = The provided RSL 'queue' parameter is invalid
-63 = The provided RSL 'stderr' parameter is invalid
-64 = The provided RSL 'stdin' parameter is invalid
-65 = The provided RSL 'stdout' parameter is invalid
-66 = The job manager failed to locate an internal script
-67 = The job manager failed on the system call pipe()
-68 = The job manager failed on the system call fcntl()
-69 = The job manager failed to create the temporary stdout filename
-
-70 = The job manager failed to create the temporary stderr filename
-71 = The job manager failed on the system call fork()
-72 = The executable file permissions do not allow execution
-73 = The job manager failed to open stdout
-74 = The job manager failed to open stderr
-75 = The cache file could not be opened in order to relocate the user proxy
-76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
-77 = The job manager failed to insert the contact in the client contact list
-78 = The contact was not found in the job manager's client contact list
-79 = Connecting to the job manager failed.  Possible reasons: job terminated, invalid job contact, network problems, ...
-
-80 = The syntax of the job contact is invalid
-81 = The executable parameter in the RSL is undefined
-82 = The job manager service is misconfigured.  condor arch undefined
-83 = The job manager service is misconfigured.  condor os undefined
-84 = The provided RSL 'min_memory' parameter is invalid
-85 = The provided RSL 'max_memory' parameter is invalid
-86 = The RSL 'min_memory' value is not zero or greater
-87 = The RSL 'max_memory' value is not zero or greater
-88 = The creation of a HTTP message failed
-89 = Parsing incoming HTTP message failed
-
-90 = The packing of information into a HTTP message failed
-91 = An incoming HTTP message did not contain the expected information
-92 = The job manager does not support the service that the client requested
-93 = The gatekeeper failed to find the requested service
-94 = The jobmanager does not accept any new requests (shutting down)
-95 = The client failed to close the listener associated with the callback URL
-96 = The gatekeeper contact cannot be parsed
-97 = The job manager could not find the 'poe' command
-98 = The job manager could not find the 'mpirun' command
-99 = The provided RSL 'start_time' parameter is invalid
-100 = The provided RSL 'reservation_handle' parameter is invalid
-
-101 = The provided RSL 'max_wall_time' parameter is invalid
-102 = The RSL 'max_wall_time' value is not zero or greater
-103 = The provided RSL 'max_cpu_time' parameter is invalid
-104 = The RSL 'max_cpu_time' value is not zero or greater
-105 = The job manager is misconfigured, a scheduler script is missing
-106 = The job manager is misconfigured, a scheduler script has invalid permissions
-107 = The job manager failed to signal the job
-108 = The job manager did not recognize/support the signal type
-109 = The job manager failed to get the job id from the local scheduler
-
-110 = The job manager is waiting for a commit signal
-111 = The job manager timed out while waiting for a commit signal
-112 = The provided RSL 'save_state' parameter is invalid
-113 = The provided RSL 'restart' parameter is invalid
-114 = The provided RSL 'two_phase' parameter is invalid
-115 = The RSL 'two_phase' value is not zero or greater
-116 = The provided RSL 'stdout_position' parameter is invalid
-117 = The RSL 'stdout_position' value is not zero or greater
-118 = The provided RSL 'stderr_position' parameter is invalid
-119 = The RSL 'stderr_position' value is not zero or greater
-
-120 = The job manager restart attempt failed
-121 = The job state file doesn't exist
-122 = Could not read the job state file
-123 = Could not write the job state file
-124 = The old job manager is still alive
-125 = The job manager state file TTL expired
-126 = It is unknown if the job was submitted
-127 = The provided RSL 'remote_io_url' parameter is invalid
-128 = Could not write the remote io url file
-129 = The standard output/error size is different
-
-130 = The job manager was sent a stop signal (job is still running)
-131 = The user proxy expired (job is still running)
-132 = The job was not submitted by original jobmanager
-133 = The job manager is not waiting for that commit signal
-134 = The provided RSL scheduler specific parameter is invalid
-135 = The job manager could not stage in a file
-136 = The scratch directory could not be created
-137 = The provided 'gass_cache' parameter is invalid
-138 = The RSL contains attributes which are not valid for job submission
-139 = The RSL contains attributes which are not valid for stdio update
-
-140 = The RSL contains attributes which are not valid for job restart
-141 = The provided RSL 'file_stage_in' parameter is invalid
-142 = The provided RSL 'file_stage_in_shared' parameter is invalid
-143 = The provided RSL 'file_stage_out' parameter is invalid
-144 = The provided RSL 'gass_cache' parameter is invalid
-145 = The provided RSL 'file_cleanup' parameter is invalid
-146 = The provided RSL 'scratch_dir' parameter is invalid
-147 = The provided scheduler-specific RSL parameter is invalid
-148 = A required RSL attribute was not defined in the RSL spec
-149 = The gass_cache attribute points to an invalid cache directory
-
-150 = The provided RSL 'save_state' parameter has an invalid value
-151 = The job manager could not open the RSL attribute validation file
-152 = The job manager could not read the RSL attribute validation file
-153 = The provided RSL 'proxy_timeout' is invalid
-154 = The RSL 'proxy_timeout' value is not greater than zero
-155 = The job manager could not stage out a file
-156 = The job contact string does not match any which the job manager is handling
-157 = Proxy delegation failed
-158 = The job manager could not lock the state lock file
-
-1000 = Failed to start up callback handler
-1003 = Job contact not set
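
One plausible way to consume an error table like this is java.util.Properties keyed by GRAM
error code; a sketch, assuming the file is available on the classpath as /errors.properties:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public class GramErrorMessages {
        private final Properties messages = new Properties();

        public GramErrorMessages() throws IOException {
            // The resource path is illustrative; adjust to wherever the table is packaged.
            try (InputStream in = GramErrorMessages.class.getResourceAsStream("/errors.properties")) {
                if (in != null) {
                    messages.load(in);
                }
            }
        }

        public String lookup(int gramErrorCode) {
            return messages.getProperty(String.valueOf(gramErrorCode),
                    "Unknown GRAM error code: " + gramErrorCode);
        }

        public static void main(String[] args) throws IOException {
            // Prints "The executable does not exist" when code 5 is defined in the table.
            System.out.println(new GramErrorMessages().lookup(5));
        }
    }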

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/resources/service.properties b/modules/gfac/gfac-gram/src/main/resources/service.properties
deleted file mode 100644
index 391bfea..0000000
--- a/modules/gfac/gfac-gram/src/main/resources/service.properties
+++ /dev/null
@@ -1,58 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-
-#
-# Class which implemented Scheduler interface. It will be used to determine a Provider
-#
-scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
-
-#
-# Data Service Plugins classes
-#
-datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
-
-#
-# Pre execution Plugins classes. For example, GridFTP Input Staging
-#
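-# NOTE: java.util.Properties keeps only the last value assigned to a key, so repeating
-# prechain.classes (or postchain.classes below) means only the final entry takes effect;
-# multiple plugins presumably need to be combined into one value.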
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging 
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
-
-#
-# Post execution Plugins classes. For example, GridFTP Output Staging
-#
-postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
-postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
-
-#
-# SSH private key location. It will be used by SSHProvider
-#
-# ssh.key=/home/user/.ssh/id_rsa
-# ssh.keypass=
-# ssh.username=usernameAtHost
-
-#
-# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
-#
-# myproxy.server=myproxy.teragrid.org
-# myproxy.user=username
-# myproxy.pass=password
-# myproxy.life=3600
\ No newline at end of file
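
The duplicate-key behavior noted in the file above is easy to verify with a two-line
experiment against java.util.Properties:

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.Properties;

    public class DuplicateKeyDemo {
        public static void main(String[] args) throws IOException {
            Properties props = new Properties();
            props.load(new StringReader(
                    "prechain.classes=first.Plugin\n" +
                    "prechain.classes=second.Plugin\n"));
            // Prints "second.Plugin": the later assignment silently wins.
            System.out.println(props.getProperty("prechain.classes"));
        }
    }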

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java b/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
deleted file mode 100644
index a5960d4..0000000
--- a/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.core.gfac.services.impl;
-
-import junit.framework.Assert;
-import org.apache.airavata.common.utils.AiravataUtils;
-import org.apache.airavata.common.utils.DatabaseTestCases;
-import org.apache.airavata.common.utils.DerbyUtil;
-import org.apache.airavata.credential.store.store.CredentialReader;
-import org.apache.airavata.credential.store.store.impl.CredentialReaderImpl;
-import org.apache.airavata.gfac.RequestData;
-import org.apache.airavata.gfac.gram.security.GSISecurityContext;
-import org.apache.log4j.Logger;
-import org.junit.BeforeClass;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 7/11/13
- * Time: 1:31 AM
- */
-
-public class GFacBaseTestWithMyProxyAuth extends DatabaseTestCases {
-
-    private static String myProxyUserName;
-    private static String myProxyPassword;
-
-    private static final Logger log = Logger.getLogger(GFacBaseTestWithMyProxyAuth.class);
-
-
-    @BeforeClass
-    public static void setUpClass() throws Exception {
-        AiravataUtils.setExecutionAsServer();
-
-        myProxyUserName = System.getProperty("myproxy.user");
-        myProxyPassword = System.getProperty("myproxy.password");
-
-        if (myProxyUserName == null || myProxyPassword == null
-                || myProxyUserName.trim().equals("") || myProxyPassword.trim().equals("")) {
-            log.error("===== Please set myproxy.user and myproxy.password system properties. =======");
-            Assert.fail("Please set myproxy.user and myproxy.password system properties.");
-        }
-
-        log.info("Using MyProxy user name - " + myProxyUserName);
-
-        setUpDatabase();
-
-    }
-
-
-
-
-    public static void setUpDatabase() throws Exception {
-        DerbyUtil.startDerbyInServerMode(getHostAddress(), getPort(), getUserName(), getPassword());
-
-        waitTillServerStarts();
-
-        /*
-         * String createTable = "CREATE TABLE CREDENTIALS\n" + "(\n" + "        GATEWAY_NAME VARCHAR(256) NOT NULL,\n" +
-         * "        COMMUNITY_USER_NAME VARCHAR(256) NOT NULL,\n" + "        CREDENTIAL BLOB NOT NULL,\n" +
-         * "        PRIVATE_KEY BLOB NOT NULL,\n" + "        NOT_BEFORE VARCHAR(256) NOT NULL,\n" +
-         * "        NOT_AFTER VARCHAR(256) NOT NULL,\n" + "        LIFETIME INTEGER NOT NULL,\n" +
-         * "        REQUESTING_PORTAL_USER_NAME VARCHAR(256) NOT NULL,\n" +
-         * "        REQUESTED_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',\n" +
-         * "        PRIMARY KEY (GATEWAY_NAME, COMMUNITY_USER_NAME)\n" + ")";
-         */
-
-        String createTable = "CREATE TABLE CREDENTIALS\n" + "(\n"
-                + "        GATEWAY_ID VARCHAR(256) NOT NULL,\n"
-                + "        TOKEN_ID VARCHAR(256) NOT NULL,\n"
-                + // Actual token used to identify the credential
-                "        CREDENTIAL BLOB NOT NULL,\n" + "        PORTAL_USER_ID VARCHAR(256) NOT NULL,\n"
-                + "        TIME_PERSISTED TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n"
-                + "        PRIMARY KEY (GATEWAY_ID, TOKEN_ID)\n" + ")";
-
-        String dropTable = "drop table CREDENTIALS";
-
-        try {
-            executeSQL(dropTable);
-        } catch (Exception e) {
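-            // Ignored deliberately: the CREDENTIALS table may not exist on a fresh database.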
-        }
-
-        executeSQL(createTable);
-
-    }
-
-    public GSISecurityContext getSecurityContext() throws Exception {
-        GSISecurityContext.setUpTrustedCertificatePath(System.getProperty("gsi.certificate.path"));
-        RequestData requestData = new RequestData();
-        requestData.setMyProxyServerUrl("myproxy.teragrid.org");
-        requestData.setMyProxyUserName(System.getProperty("myproxy.user"));
-        requestData.setMyProxyPassword(System.getProperty("myproxy.password"));
-        requestData.setMyProxyLifeTime(3600);
-        CredentialReader credentialReader = new CredentialReaderImpl(getDbUtil());
-        return new GSISecurityContext(credentialReader, requestData);
-    }
-
-}
\ No newline at end of file
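
An alternative to failing outright when the MyProxy credentials are absent is to skip the test
instead; a JUnit 4 sketch using org.junit.Assume, where the property names mirror the ones read
above but the class is otherwise illustrative:

    import static org.junit.Assume.assumeTrue;

    import org.junit.Before;
    import org.junit.Test;

    public class MyProxyGuardedTest {
        private String user;
        private String password;

        @Before
        public void checkCredentialsPresent() {
            user = System.getProperty("myproxy.user");
            password = System.getProperty("myproxy.password");
            // Skips (rather than fails) the test when the credentials are not supplied.
            assumeTrue(user != null && !user.trim().isEmpty()
                    && password != null && !password.trim().isEmpty());
        }

        @Test
        public void connects() {
            // ... test body that needs the MyProxy credentials ...
        }
    }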


[3/4] airavata git commit: removed gfac-ec2, gfac-gram and gfac-hadoop modules from source.

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java b/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
deleted file mode 100644
index 9f86197..0000000
--- a/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
+++ /dev/null
@@ -1,195 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-// */
-//
-//package org.apache.airavata.gfac.ec2;
-//
-//import org.airavata.appcatalog.cpi.AppCatalog;
-//import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
-//import org.apache.airavata.commons.gfac.type.*;
-//import org.apache.airavata.gfac.GFacConfiguration;
-//import org.apache.airavata.gfac.GFacException;
-//import org.apache.airavata.gfac.core.context.ApplicationContext;
-//import org.apache.airavata.gfac.core.context.JobExecutionContext;
-//import org.apache.airavata.gfac.core.context.MessageContext;
-//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-//import org.apache.airavata.model.appcatalog.computeresource.*;
-//import org.apache.airavata.schemas.gfac.*;
-//import org.junit.Assert;
-//import org.junit.Before;
-//import org.junit.Test;
-//
-//import java.io.File;
-//import java.net.URL;
-//import java.util.ArrayList;
-//import java.util.List;
-//
-///**
-// * Your Amazon instance should be in a running state before running this test.
-// */
-//public class EC2ProviderTest {
-//    private JobExecutionContext jobExecutionContext;
-//
-//    private static final String hostName = "ec2-host";
-//
-//    private static final String hostAddress = "ec2-address";
-//
-//    private static final String sequence1 = "RR042383.21413#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
-//            "CTCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCCTGCGCCCATTGACCAATATTCCTCA" +
-//            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
-//            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAATCTTCCTTTCAGAAG" +
-//            "GCTGTCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCCGCCGGTCGCCATCAGTCTTAGCAAGCTAAGACCATGCTGCCCCTGACTTGCATGT" +
-//            "GTTAAGCCTGTAGCTTAGCGTTC";
-//
-//    private static final String sequence2 = "RR042383.31934#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
-//            "CCCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCTCTCGCCCATTGACCAATATTCCTCA" +
-//            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
-//            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAAATCTTCCTTTCAGAA" +
-//            "GGCTATCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCG";
-//
-//    /* Following variables are needed to be set in-order to run the test. Since these are account specific information,
-//       I'm not adding the values here. It's the responsibility of the person who's running the test to update
-//       these variables accordingly.
-//       */
-//
-//    /* Username used to log into your ec2 instance eg.ec2-user */
-//    private String userName = "";
-//
-//    /* Secret key used to connect to the image */
-//    private String secretKey = "";
-//
-//    /* Access key used to connect to the image */
-//    private String accessKey = "";
-//
-//    /* Instance id of the running instance of your image */
-//    private String instanceId = "";
-//
-//    @Before
-//    public void setUp() throws Exception {
-//        URL resource = EC2ProviderTest.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-//        assert resource != null;
-//        System.out.println(resource.getFile());
-//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
-//
-//        /* EC2 Host */
-//        ComputeResourceDescription host = new ComputeResourceDescription();
-//        host.setHostName(hostName);
-//        host.setResourceDescription("EC2 compute resource");
-//        host.addToIpAddresses(hostAddress);
-//
-//        CloudJobSubmission cloudJobSubmission = new CloudJobSubmission();
-//        cloudJobSubmission.setProviderName(ProviderName.EC2);
-//        cloudJobSubmission.setExecutableType("sh");
-//        cloudJobSubmission.setNodeId(instanceId);
-//        cloudJobSubmission.setSecurityProtocol(SecurityProtocol.USERNAME_PASSWORD);
-//        cloudJobSubmission.setUserAccountName(userName);
-//
-//        AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
-//        String submissionId = appCatalog.getComputeResource().addCloudJobSubmission(cloudJobSubmission);
-//
-//        JobSubmissionInterface submissionInterface = new JobSubmissionInterface();
-//        submissionInterface.setJobSubmissionInterfaceId(submissionId);
-//        submissionInterface.setJobSubmissionProtocol(JobSubmissionProtocol.CLOUD);
-//        submissionInterface.setPriorityOrder(0);
-//
-//        host.addToJobSubmissionInterfaces(submissionInterface);
-//
-//        String computeResourceId = appCatalog.getComputeResource().addComputeResource(host);
-//
-//        /* App */
-//
-//        ApplicationDescription ec2Desc = new ApplicationDescription(Ec2ApplicationDeploymentType.type);
-//        Ec2ApplicationDeploymentType ec2App = (Ec2ApplicationDeploymentType)ec2Desc.getType();
-//
-//        String serviceName = "Gnome_distance_calculation_workflow";
-//        ec2Desc.getType().addNewApplicationName().setStringValue(serviceName);
-//        ec2App.setJobType(JobTypeType.EC_2);
-//        ec2App.setExecutable("/home/ec2-user/run.sh");
-//        ec2App.setExecutableType("sh");
-//
-//        /* Service */
-//        ServiceDescription serv = new ServiceDescription();
-//        serv.getType().setName("GenomeEC2");
-//
-//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-//
-//        InputParameterType input1 = InputParameterType.Factory.newInstance();
-//        input1.setParameterName("genome_input1");
-//        input1.setParameterType(StringParameterType.Factory.newInstance());
-//        inputList.add(input1);
-//
-//        InputParameterType input2 = InputParameterType.Factory.newInstance();
-//        input2.setParameterName("genome_input2");
-//        input2.setParameterType(StringParameterType.Factory.newInstance());
-//        inputList.add(input2);
-//
-//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
-//
-//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-//        OutputParameterType output = OutputParameterType.Factory.newInstance();
-//        output.setParameterName("genome_output");
-//        output.setParameterType(StringParameterType.Factory.newInstance());
-//        outputList.add(output);
-//
-//        OutputParameterType[] outputParamList = outputList
-//                .toArray(new OutputParameterType[outputList.size()]);
-//
-//        serv.getType().setInputParametersArray(inputParamList);
-//        serv.getType().setOutputParametersArray(outputParamList);
-//
-//        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-//        ApplicationContext applicationContext = new ApplicationContext();
-//        jobExecutionContext.setApplicationContext(applicationContext);
-//        applicationContext.setServiceDescription(serv);
-//        applicationContext.setApplicationDeploymentDescription(ec2Desc);
-//        applicationContext.setHostDescription(host);
-//
-//        AmazonSecurityContext amazonSecurityContext =
-//                new AmazonSecurityContext(userName, accessKey, secretKey, instanceId);
-//        jobExecutionContext.addSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT, amazonSecurityContext);
-//
-//        MessageContext inMessage = new MessageContext();
-//        ActualParameter genomeInput1 = new ActualParameter();
-//        ((StringParameterType)genomeInput1.getType()).setValue(sequence1);
-//        inMessage.addParameter("genome_input1", genomeInput1);
-//
-//        ActualParameter genomeInput2 = new ActualParameter();
-//        ((StringParameterType)genomeInput2.getType()).setValue(sequence2);
-//        inMessage.addParameter("genome_input2", genomeInput2);
-//
-//        MessageContext outMessage = new MessageContext();
-//        ActualParameter echo_out = new ActualParameter();
-//        outMessage.addParameter("distance", echo_out);
-//
-//        jobExecutionContext.setInMessageContext(inMessage);
-//        jobExecutionContext.setOutMessageContext(outMessage);
-//    }
-//
-//    @Test
-//    public void testEC2Provider() throws GFacException {
-//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-//        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-//        Assert.assertEquals(MappingFactory.
-//                toString((ActualParameter) outMessageContext.getParameter("genome_output")), "476");
-//    }
-//}
-//
-//
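
The commented-out test above leaves its account fields as empty strings for whoever runs it to
fill in. One hedged alternative is pulling them from the environment; the variable names below
are invented for illustration:

    // Hypothetical helper: read the EC2 test credentials from the environment instead of
    // hard-coding empty strings.
    public class Ec2TestConfig {
        static String require(String name) {
            String value = System.getenv(name);
            if (value == null || value.trim().isEmpty()) {
                throw new IllegalStateException("Environment variable " + name + " must be set");
            }
            return value;
        }

        public static void main(String[] args) {
            String userName  = require("EC2_USER");        // invented variable names
            String accessKey = require("EC2_ACCESS_KEY");
            String secretKey = require("EC2_SECRET_KEY");
            String instance  = require("EC2_INSTANCE_ID");
            System.out.println("EC2 test configuration loaded for instance " + instance);
        }
    }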

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/test/resources/echo.bat
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/test/resources/echo.bat b/modules/gfac/gfac-ec2/src/test/resources/echo.bat
deleted file mode 100644
index c6b849b..0000000
--- a/modules/gfac/gfac-ec2/src/test/resources/echo.bat
+++ /dev/null
@@ -1,22 +0,0 @@
-::
-::
-:: Licensed to the Apache Software Foundation (ASF) under one
-:: or more contributor license agreements.  See the NOTICE file
-:: distributed with this work for additional information
-:: regarding copyright ownership.  The ASF licenses this file
-:: to you under the Apache License, Version 2.0 (the
-:: "License"); you may not use this file except in compliance
-:: with the License.  You may obtain a copy of the License at
-::
-::   http://www.apache.org/licenses/LICENSE-2.0
-::
-:: Unless required by applicable law or agreed to in writing,
-:: software distributed under the License is distributed on an
-:: "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-:: KIND, either express or implied.  See the License for the
-:: specific language governing permissions and limitations
-:: under the License.
-::
-::
-@echo off
-echo %1^=%2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/test/resources/logging.properties b/modules/gfac/gfac-ec2/src/test/resources/logging.properties
deleted file mode 100644
index 0584d38..0000000
--- a/modules/gfac/gfac-ec2/src/test/resources/logging.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#default/fallback log4j configuration
-#
-
-# Set root logger level to INFO and attach appenders A1 and A2.
-log4j.rootLogger=INFO, A1, A2
-
-# A1 is set to be a rolling file appender with default params
-log4j.appender.A1=org.apache.log4j.RollingFileAppender
-log4j.appender.A1.File=target/seclogs.txt
-
-# A1 uses PatternLayout.
-log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
-
-# A2 is a console appender
-log4j.appender.A2=org.apache.log4j.ConsoleAppender
-
-# A2 uses PatternLayout.
-log4j.appender.A2.layout=org.apache.log4j.PatternLayout
-log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
-
-log4j.logger.unicore.security=INFO
-

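For reference, the configuration above wired two log4j appenders: A1, a rolling file appender writing to target/seclogs.txt, and A2, a console appender, both fed from an INFO-level root logger. A minimal sketch of test code emitting through that setup via the slf4j API declared in this module's pom; the class name and messages are illustrative, not part of the original module:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingSketch {
        // slf4j delegates to the log4j backend configured above
        private static final Logger log = LoggerFactory.getLogger(LoggingSketch.class);

        public static void main(String[] args) {
            log.info("reaches both the console (A2) and target/seclogs.txt (A1)");
            log.debug("suppressed: the root logger level above is INFO");
        }
    }
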
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/test/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/test/resources/service.properties b/modules/gfac/gfac-ec2/src/test/resources/service.properties
deleted file mode 100644
index e266d13..0000000
--- a/modules/gfac/gfac-ec2/src/test/resources/service.properties
+++ /dev/null
@@ -1,67 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-#
-# Properties for JCR Registry interface. By default, Apache Jackrabbit is used.
-#
-# org.apache.jackrabbit.repository.uri=http://localhost:8080/rmi
-# jcr.class=org.apache.jackrabbit.rmi.repository.RmiRepositoryFactory
-jcr.class=org.apache.jackrabbit.core.RepositoryFactoryImpl
-jcr.user=admin
-jcr.pass=admin
-
-
-#
-# Class that implements the Scheduler interface. It is used to select a Provider.
-#
-scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
-
-#
-# Data Service Plugins classes
-#
-datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
-
-#
-# Pre execution Plugins classes. For example, GridFTP Input Staging
-#
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging,\
-                  org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
-
-#
-# Post execution Plugins classes. For example, GridFTP Output Staging
-#
-postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging,\
-                   org.apache.airavata.core.gfac.extension.post.OutputRegister
-
-#
-# SSH private key location. It will be used by SSHProvider
-#
-# ssh.key=/home/user/.ssh/id_rsa
-# ssh.keypass=
-# ssh.username=usernameAtHost
-
-#
-# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
-#
-# myproxy.server=myproxy.teragrid.org
-# myproxy.user=username
-# myproxy.pass=password
-# myproxy.life=3600
\ No newline at end of file

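A sketch of how the MyProxy settings above might be consumed with java.util.Properties; the file path and fallback values are assumptions, not part of the original module:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class ServicePropertiesSketch {
        public static void main(String[] args) throws IOException {
            Properties props = new Properties();
            // path is illustrative; the file shipped under src/test/resources
            try (FileInputStream in = new FileInputStream("service.properties")) {
                props.load(in);
            }
            // keys as defined in the deleted file; the defaults here are illustrative
            String server = props.getProperty("myproxy.server", "myproxy.teragrid.org");
            int lifetime = Integer.parseInt(props.getProperty("myproxy.life", "3600"));
            System.out.println("MyProxy server: " + server + ", lifetime: " + lifetime + "s");
        }
    }

Note that java.util.Properties keeps only the last occurrence of a repeated key, which is why the plugin chains above are safer expressed as one comma-separated value per key, assuming the loader splits such lists on commas.
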
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/pom.xml b/modules/gfac/gfac-gram/pom.xml
deleted file mode 100644
index ac58e15..0000000
--- a/modules/gfac/gfac-gram/pom.xml
+++ /dev/null
@@ -1,124 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
-    distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
-    the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
-    obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
-    in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-    ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
-    the License. -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>org.apache.airavata</groupId>
-        <artifactId>gfac</artifactId>
-        <version>0.14-SNAPSHOT</version>
-        <relativePath>../pom.xml</relativePath>
-    </parent>
-
-    <modelVersion>4.0.0</modelVersion>
-    <artifactId>airavata-gfac-gram</artifactId>
-    <name>Airavata GFac GRAM implementation</name>
-    <description>GFac extension for job submission via GRAM</description>
-    <url>http://airavata.apache.org/</url>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.jglobus</groupId>
-            <artifactId>gss</artifactId>
-            <version>${jglobus.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.jglobus</groupId>
-            <artifactId>gram</artifactId>
-            <version>${jglobus.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.jglobus</groupId>
-            <artifactId>myproxy</artifactId>
-            <version>${jglobus.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.jglobus</groupId>
-            <artifactId>gridftp</artifactId>
-            <version>${jglobus.version}</version>
-        </dependency>
-
-        <!-- Logging -->
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-
-        <!-- GFAC schemas -->
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-gfac-core</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <!-- Credential Store -->
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-credential-store</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-server-configuration</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-client-configuration</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-
-        <!-- Test -->
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-            <version>6.1.1</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>jcl-over-slf4j</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <!-- gsi-ssh api dependencies -->
-
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-data-models</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.jcraft</groupId>
-            <artifactId>jsch</artifactId>
-            <version>0.1.50</version>
-        </dependency>
-        <dependency>
-            <groupId>org.ogce</groupId>
-            <artifactId>bcgss</artifactId>
-            <version>146</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.xmlbeans</groupId>
-            <artifactId>xmlbeans</artifactId>
-            <version>${xmlbeans.version}</version>
-        </dependency>
-
-    </dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/external/GridFtp.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/external/GridFtp.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/external/GridFtp.java
deleted file mode 100644
index fef9fad..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/external/GridFtp.java
+++ /dev/null
@@ -1,558 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gram.external;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.Vector;
-
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.gram.util.GramProviderUtils;
-import org.apache.airavata.gfac.gram.util.GridFTPContactInfo;
-import org.globus.ftp.DataChannelAuthentication;
-import org.globus.ftp.DataSourceStream;
-import org.globus.ftp.FileInfo;
-import org.globus.ftp.GridFTPClient;
-import org.globus.ftp.HostPort;
-import org.globus.ftp.Marker;
-import org.globus.ftp.MarkerListener;
-import org.globus.ftp.MlsxEntry;
-import org.globus.ftp.Session;
-import org.globus.ftp.exception.ClientException;
-import org.globus.ftp.exception.ServerException;
-import org.globus.gsi.gssapi.auth.HostAuthorization;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * GridFTP tools
- */
-public class GridFtp {
-    public static final Logger log = LoggerFactory.getLogger(GridFtp.class);
-
-    public static final String GSIFTP_SCHEME = "gsiftp";
-    public static final String HOST = "host";
-
-    /**
-     * Make directory at remote location
-     *
-     * @param destURI
-     * @param gssCred
-     * @throws ServerException
-     * @throws IOException
-     */
-    public void makeDir(URI destURI, GSSCredential gssCred) throws ToolsException {
-        GridFTPClient destClient = null;
-        GridFTPContactInfo destHost = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
-        try {
-
-            String destPath = destURI.getPath();
-            log.info(("Creating Directory = " + destHost + "=" + destPath));
-
-            destClient = new GridFTPClient(destHost.hostName, destHost.port);
-
-            int tryCount = 0;
-            while (true) {
-                try {
-                    destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-                    destClient.authenticate(gssCred);
-                    destClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
-
-                    if (!destClient.exists(destPath)) {
-                        destClient.makeDir(destPath);
-                    }
-                    break;
-                } catch (ServerException e) {
-                    tryCount++;
-                    if (tryCount >= 3) {
-                        throw new ToolsException(e.getMessage(), e);
-                    }
-                    Thread.sleep(10000);
-                } catch (IOException e) {
-                    tryCount++;
-                    if (tryCount >= 3) {
-                        throw new ToolsException(e.getMessage(), e);
-                    }
-                    Thread.sleep(10000);
-                }
-            }
-        } catch (ServerException e) {
-            throw new ToolsException("Cannot Create GridFTP Client to:" + destHost.toString(), e);
-        } catch (IOException e) {
-            throw new ToolsException("Cannot Create GridFTP Client to:" + destHost.toString(), e);
-        } catch (InterruptedException e) {
-            throw new ToolsException("Internal Error cannot sleep", e);
-        } finally {
-            if (destClient != null) {
-                try {
-                    destClient.close();
-                } catch (Exception e) {
-                    log.warn("Cannot close GridFTP client connection",e);
-                }
-            }
-        }
-    }
-
-    /**
-     * Upload file from stream
-     *
-     * @param destURI
-     * @param gsCredential
-     * @param io
-     * @throws GFacException
-     */
-    public void uploadFile(URI destURI, GSSCredential gsCredential, InputStream io) throws ToolsException {
-        GridFTPClient ftpClient = null;
-        GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
-
-        try {
-
-            String remoteFile = destURI.getPath();
-            log.info("The remote file is " + remoteFile);
-
-            log.debug("Setup GridFTP Client");
-
-            ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
-            ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-            ftpClient.authenticate(gsCredential);
-            ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
-
-            log.info("Uploading file");
-            if (checkBinaryExtensions(remoteFile)) {
-                log.debug("Transfer mode is set to Binary for a file upload");
-                ftpClient.setType(Session.TYPE_IMAGE);
-            }
-
-            ftpClient.put(remoteFile, new DataSourceStream(io), new MarkerListener() {
-                public void markerArrived(Marker marker) {
-                }
-            });
-
-            log.info("Upload file to:" + remoteFile + " is done");
-
-        } catch (ServerException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
-        } catch (IOException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
-        } catch (ClientException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
-        } finally {
-            if (ftpClient != null) {
-                try {
-                    ftpClient.close();
-                } catch (Exception e) {
-                    log.warn("Cannot close GridFTP client connection",e);
-                }
-            }
-        }
-    }
-
-    public void uploadFile(URI srcURI,  URI destURI, GSSCredential gsCredential) throws ToolsException {
-        GridFTPClient srcClient = null;
-        GridFTPContactInfo destContactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
-        GridFTPContactInfo srcContactInfo = new GridFTPContactInfo(srcURI.getHost(),srcURI.getPort());
-        try {
-            String remoteFile = destURI.getPath();
-            log.info("The remote file is " + remoteFile);
-            log.debug("Setup GridFTP Client");
-            srcClient = new GridFTPClient(srcContactInfo.hostName, srcContactInfo.port);
-            srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-            srcClient.authenticate(gsCredential);
-            srcClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
-
-            GridFTPClient destClient = new GridFTPClient(destContactInfo.hostName, destContactInfo.port);
-            destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-            destClient.authenticate(gsCredential);
-            destClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
-            log.debug("Uploading file");
-            if (checkBinaryExtensions(remoteFile)) {
-                log.debug("Transfer mode is set to Binary for a file upload");
-                srcClient.setType(Session.TYPE_IMAGE);
-            }
-
-            srcClient.transfer(srcURI.getPath(),destClient, remoteFile, false, null);
-
-            log.info("Upload file to:" + remoteFile + " is done");
-
-        } catch (ServerException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
-        } catch (IOException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
-        } catch (ClientException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
-        } finally {
-            if (srcClient != null) {
-                try {
-                    srcClient.close();
-                } catch (Exception e) {
-                    log.warn("Cannot close GridFTP client connection",e);
-                }
-            }
-        }
-    }
-
-    /**
-     * Upload file to remote location
-     *
-     * @param destURI
-     * @param gsCredential
-     * @param localFile
-     * @throws GFacException
-     */
-    public void uploadFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
-        GridFTPClient ftpClient = null;
-        GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
-        try {
-
-            String remoteFile = destURI.getPath();
-
-            log.info("The local temp file is " + localFile);
-            log.info("the remote file is " + remoteFile);
-
-            log.debug("Setup GridFTP Client");
-
-            ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
-            ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-            ftpClient.authenticate(gsCredential);
-            ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
-
-            log.debug("Uploading file");
-            if (checkBinaryExtensions(remoteFile)) {
-                log.debug("Transfer mode is set to Binary for a file upload");
-                ftpClient.setType(Session.TYPE_IMAGE);
-            }
-
-
-            ftpClient.put(localFile, remoteFile, false);
-
-            log.info("Upload file to:" + remoteFile + " is done");
-
-        } catch (ServerException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
-        } catch (IOException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
-        } catch (ClientException e) {
-            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
-        } finally {
-            if (ftpClient != null) {
-                try {
-                    ftpClient.close();
-                } catch (Exception e) {
-                    log.warn("Cannot close GridFTP client connection",e);
-                }
-            }
-        }
-    }
-
-    /**
-     * Download File from remote location
-     *
-     * @param destURI
-     * @param gsCredential
-     * @param localFile
-     * @throws GFacException
-     */
-    public void downloadFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
-        GridFTPClient ftpClient = null;
-        GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
-        try {
-            String remoteFile = destURI.getPath();
-
-            log.info("The local temp file is " + localFile);
-            log.info("the remote file is " + remoteFile);
-
-            log.debug("Setup GridFTP Client");
-
-            ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
-            ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-            ftpClient.authenticate(gsCredential);
-            ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
-
-            log.debug("Downloading file");
-            if (checkBinaryExtensions(remoteFile)) {
-                log.debug("Transfer mode is set to Binary to download a file");
-                ftpClient.setType(Session.TYPE_IMAGE);
-            }
-
-            ftpClient.get(remoteFile, localFile);
-
-            log.info("Download file to:" + localFile + " is done");
-
-        } catch (ServerException e) {
-            throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
-        } catch (IOException e) {
-            throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
-        } catch (ClientException e) {
-            throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
-        } finally {
-            if (ftpClient != null) {
-                try {
-                    ftpClient.close(false);
-                } catch (Exception e) {
-                    log.warn("Cannot close GridFTP client connection",e);
-                }
-            }
-        }
-    }
-
-    /**
-     * Stream remote file
-     *
-     * @param destURI
-     * @param gsCredential
-     * @param localFile
-     * @return
-     * @throws GFacException
-     */
-    public String readRemoteFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
-        BufferedReader instream = null;
-        File localTempfile = null;
-        try {
-
-            if (localFile == null) {
-                localTempfile = File.createTempFile("stderr", "err");
-            } else {
-                localTempfile = localFile;
-            }
-
-            log.info("Local temporary file:" + localTempfile);
-
-            downloadFile(destURI, gsCredential, localTempfile);
-
-            instream = new BufferedReader(new FileReader(localTempfile));
-            StringBuffer buff = new StringBuffer();
-            String temp = null;
-            while ((temp = instream.readLine()) != null) {
-                buff.append(temp);
-                buff.append(Constants.NEWLINE);
-            }
-
-            log.info("finish read file:" + localTempfile);
-
-            return buff.toString();
-        } catch (FileNotFoundException e) {
-            throw new ToolsException("Cannot read localfile file:" + localTempfile, e);
-        } catch (IOException e) {
-            throw new ToolsException("Cannot read localfile file:" + localTempfile, e);
-        } finally {
-            if (instream != null) {
-                try {
-                    instream.close();
-                } catch (Exception e) {
-                    log.warn("Cannot close GridFTP client connection",e);
-                }
-            }
-        }
-    }
-
-    /**
-     * Transfer data from one GridFTp Endpoint to another GridFTP Endpoint
-     *
-     * @param srchost
-     * @param desthost
-     * @param gssCred
-     * @param srcActive
-     * @throws ServerException
-     * @throws ClientException
-     * @throws IOException
-     */
-    public void transfer(URI srchost, URI desthost, GSSCredential gssCred, boolean srcActive) throws ToolsException {
-        GridFTPClient destClient = null;
-        GridFTPClient srcClient = null;
-
-        try {
-            destClient = new GridFTPClient(desthost.getHost(), desthost.getPort());
-            destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-            destClient.authenticate(gssCred);
-
-            if (checkBinaryExtensions(desthost.getPath())) {
-                log.debug("Transfer mode is set to Binary");
-                destClient.setType(Session.TYPE_IMAGE);
-            }
-
-            srcClient = new GridFTPClient(srchost.getHost(), srchost.getPort());
-            srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-            srcClient.authenticate(gssCred);
-
-            if (checkBinaryExtensions(srchost.getPath())) {
-                log.debug("Transfer mode is set to Binary");
-                srcClient.setType(Session.TYPE_IMAGE);
-            }
-
-            if (srcActive) {
-                log.debug("Set src active");
-                HostPort hp = destClient.setPassive();
-                srcClient.setActive(hp);
-            } else {
-                log.debug("Set dst active");
-                HostPort hp = srcClient.setPassive();
-                destClient.setActive(hp);
-            }
-
-            log.debug("Start transfer file from GridFTP:" + srchost.toString() + " to " + desthost.toString());
-
-            /**
-             * Transfer a file. The transfer() function blocks until the transfer is complete.
-             */
-            srcClient.transfer(srchost.getPath(), destClient, desthost.getPath(), false, null);
-            if (srcClient.getSize(srchost.getPath()) == destClient.getSize(desthost.getPath())) {
-                log.debug("CHECK SUM OK");
-            } else {
-                log.debug("****CHECK SUM FAILED****");
-            }
-
-        } catch (ServerException e) {
-            throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
-                    + desthost.toString(), e);
-        } catch (IOException e) {
-            throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
-                    + desthost.toString(), e);
-        } catch (ClientException e) {
-            throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
-                    + desthost.toString(), e);
-        } finally {
-            if (destClient != null) {
-                try {
-                    destClient.close();
-                } catch (Exception e) {
-                    log.warn("Cannot close GridFTP client connection at Desitnation:" + desthost.toString());
-                }
-            }
-            if (srcClient != null) {
-                try {
-                    srcClient.close();
-                } catch (Exception e) {
-                    log.warn("Cannot close GridFTP client connection at Source:" + srchost.toString(),e);
-                }
-            }
-        }
-    }
-
-	/**
-	 * List files in a GridFTP directory
-	 * @param dirURI
-	 * @param gssCred
-	 * @return
-	 * @throws ToolsException
-	 */
-    @SuppressWarnings("unchecked")
-	public List<String> listDir(URI dirURI, GSSCredential gssCred) throws ToolsException {
-    	List<String> files = new  ArrayList<String>();
-	    GridFTPClient srcClient = null;
-			try {
-				GridFTPContactInfo contactInfo = new GridFTPContactInfo(dirURI.getHost(), dirURI.getPort());
-
-				srcClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
-				srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
-				srcClient.authenticate(gssCred);
-				srcClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
-				srcClient.setType(Session.TYPE_ASCII);
-				srcClient.changeDir(dirURI.getPath());
-
-				Vector<Object> fileInfo = null;
-				try {
-					fileInfo = srcClient.mlsd();
-				} catch (Throwable e) {
-					fileInfo = srcClient.list();
-				}
-
-				if (!fileInfo.isEmpty()) {
-					for (int j = 0; j < fileInfo.size(); ++j) {
-						String name = null;
-						if (fileInfo.get(j) instanceof MlsxEntry) {
-							name = ((MlsxEntry) fileInfo.get(j)).getFileName();
-						} else if (fileInfo.get(j) instanceof FileInfo) {
-							name = ((FileInfo) fileInfo.get(j)).getName();
-						} else {
-							throw new ToolsException("Unsupported type returned by gridftp " + fileInfo.get(j));
-						}
-
-						if (!name.equals(".") && !name.equals("..")) {
-							URI uri = GramProviderUtils.createGsiftpURI(contactInfo.hostName, dirURI.getPath() + File.separator + name);
-							files.add(uri.getPath());
-						}
-					}
-				}
-				return files;
-			} catch (IOException e) {
-				throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
-			} catch (ServerException e) {
-				throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
-			} catch (ClientException e) {
-				throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
-			} catch (URISyntaxException e) {
-				throw new ToolsException("Error creating URL of listed files: " + dirURI.toString() ,e);
-			} finally {
-				if (srcClient != null) {
-	                try {
-	                    srcClient.close();
-	                } catch (Exception e) {
-	                    log.warn("Cannot close GridFTP client connection", e);
-	                }
-	            }
-		}
-	}
-    /**
-     * Check whether the file extension marks binary content, so the GridFTP
-     * transfer type can be set to image mode.
-     * @param filePath
-     * @return true if the extension is one of tar, zip, gz, tgz
-     */
-    private static boolean checkBinaryExtensions(String filePath){
-        String extension = filePath.substring(filePath.lastIndexOf(".") + 1);
-        Set<String> extensions = new HashSet<String>(Arrays.asList("tar", "zip", "gz", "tgz"));
-        return extensions.contains(extension);
-    }
-
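-    /**
-     * Check whether a file with the given name already exists in the remote
-     * input directory; if so, return a "duplicate_"-prefixed name so an
-     * upload will not overwrite the existing file, otherwise return the
-     * name unchanged.
-     */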
-    public String gridFTPFileExist(URI inputDirectory,String fileName,GSSCredential gssCred) throws ToolsException {
-        List<String> strings = listDir(inputDirectory, gssCred);
-        for(String fileExist:strings){
-            if(fileName.equals(fileExist)) {
-                fileName = "duplicate_" + fileName;
-                return fileName;
-            }
-        }
-        return fileName;
-    }
-}

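For orientation, a sketch of how the deleted GridFtp helper was typically driven: obtain a delegated GSSCredential (here via the jglobus MyProxy client that the gfac-gram pom pulls in), then create a remote directory and stage a file into it. Host names, ports, credentials, and paths are placeholders:

    import java.io.File;
    import java.net.URI;

    import org.apache.airavata.gfac.gram.external.GridFtp;
    import org.globus.myproxy.MyProxy;
    import org.ietf.jgss.GSSCredential;

    public class GridFtpSketch {
        public static void main(String[] args) throws Exception {
            // fetch a short-lived proxy credential; server, user, and password are placeholders
            MyProxy myProxy = new MyProxy("myproxy.example.org", 7512);
            GSSCredential cred = myProxy.get("username", "password", 3600);

            GridFtp ftp = new GridFtp();
            URI workDir = new URI("gsiftp://gridftp.example.org:2811/scratch/job42");
            ftp.makeDir(workDir, cred);            // no-op if the directory already exists
            // image (binary) mode is used only for tar/zip/gz/tgz extensions
            ftp.uploadFile(new URI(workDir + "/input.dat"), cred, new File("/tmp/input.dat"));
        }
    }
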
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GramDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GramDirectorySetupHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GramDirectorySetupHandler.java
deleted file mode 100644
index f2ccb9a..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GramDirectorySetupHandler.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.handler;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Properties;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.gfac.gram.security.GSISecurityContext;
-import org.apache.airavata.gfac.gram.external.GridFtp;
-import org.apache.airavata.gfac.gram.util.GramProviderUtils;
-import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
-import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.TransferState;
-import org.apache.airavata.model.workspace.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class  GramDirectorySetupHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(GramDirectorySetupHandler.class);
-   
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        log.info("Invoking GramDirectorySetupHandler ...");
-        super.invoke(jobExecutionContext);
-        String[] gridFTPEndpointArray = null;
-
-        //TODO: why it is tightly coupled with gridftp
-//        GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType();
-
-        //TODO: make it more reusable
-        HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
-
-
-
-        if(hostType instanceof GlobusHostType){
-        	gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
-        }
-        else if (hostType instanceof UnicoreHostType){
-        	gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
-        }
-        
-
-
-        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
-        GridFtp ftp = new GridFtp();
-
-        try {
-
-            GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
-                    getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-
-            if (gridFTPEndpointArray == null || gridFTPEndpointArray.length == 0) {
-            	gridFTPEndpointArray = new String[]{hostType.getHostAddress()};
-            }
-            boolean success = false;
-            GFacHandlerException pe = null;
-            for (String endpoint : gridFTPEndpointArray) {
-                try {
-
-                    URI tmpdirURI = GramProviderUtils.createGsiftpURI(endpoint, app.getScratchWorkingDirectory());
-                    URI workingDirURI = GramProviderUtils.createGsiftpURI(endpoint, app.getStaticWorkingDirectory());
-                    URI inputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getInputDataDirectory());
-                    URI outputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
-
-                    log.info("Host FTP = " + gridFTPEndpointArray[0]);
-                    log.info("temp directory = " + tmpdirURI);
-                    log.info("Working directory = " + workingDirURI);
-                    log.info("Input directory = " + inputURI);
-                    log.info("Output directory = " + outputURI);
-                    ftp.makeDir(tmpdirURI, gssCred);
-                    ftp.makeDir(workingDirURI, gssCred);
-                    ftp.makeDir(inputURI, gssCred);
-                    ftp.makeDir(outputURI, gssCred);
-                    success = true;
-                    DataTransferDetails detail = new DataTransferDetails();
-                    TransferStatus status = new TransferStatus();
-                    status.setTransferState(TransferState.DIRECTORY_SETUP);
-                    detail.setTransferStatus(status);
-                    detail.setTransferDescription("Working directory = " + workingDirURI);
-                    registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-                                  
-                    break;
-                } catch (URISyntaxException e) {
-                    pe = new GFacHandlerException("URI is malformed: " + e.getMessage(), e);
-                } catch (Exception e) {
-                    pe = new GFacHandlerException(e.getMessage(), e);
-                }
-            }
-            if (!success) {
-            	GFacUtils.saveErrorDetails(jobExecutionContext, pe.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-        		throw pe;
-            }
-        } catch (SecurityException e) {
-            throw new GFacHandlerException(e.getMessage(), e);
-        } catch (ApplicationSettingsException e1) {
-        	throw new GFacHandlerException(e1.getMessage(), e1);
-		} catch (GFacException e) {
-            throw new GFacHandlerException(e);
-        }
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}

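The handler above delegates gsiftp URI construction to GramProviderUtils.createGsiftpURI, whose body is not shown in this commit. A plausible sketch of that construction with java.net.URI, assuming the endpoint carries a host and an optional port; the default port and helper name are assumptions:

    import java.net.URI;
    import java.net.URISyntaxException;

    public class GsiftpUriSketch {
        // builds e.g. gsiftp://host:2811/path from an endpoint string and a remote path
        static URI createGsiftpURI(String endpoint, String path) throws URISyntaxException {
            URI e = new URI(endpoint.contains("://") ? endpoint : "gsiftp://" + endpoint);
            int port = e.getPort() == -1 ? 2811 : e.getPort();  // 2811: conventional GridFTP port
            return new URI("gsiftp", null, e.getHost(), port, path, null, null);
        }

        public static void main(String[] args) throws URISyntaxException {
            System.out.println(createGsiftpURI("gridftp.example.org", "/scratch/job42/input"));
        }
    }
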
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPInputHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPInputHandler.java
deleted file mode 100644
index ae81357..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPInputHandler.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.handler;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.*;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.gfac.gram.security.GSISecurityContext;
-import org.apache.airavata.gfac.gram.external.GridFtp;
-import org.apache.airavata.gfac.gram.util.GramProviderUtils;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.AppDescriptorCheckHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
-import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.TransferState;
-import org.apache.airavata.model.workspace.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
-import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GridFTPInputHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(GridFTPInputHandler.class);
- 
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        log.info("Invoking GridFTPInputHandler ...");
-        super.invoke(jobExecutionContext);
-        DataTransferDetails detail = new DataTransferDetails();
-        TransferStatus status = new TransferStatus();
-       
-        MessageContext inputNew = new MessageContext();
-        try {
-            MessageContext input = jobExecutionContext.getInMessageContext();
-            Set<String> parameters = input.getParameters().keySet();
-            for (String paramName : parameters) {
-                ActualParameter actualParameter = (ActualParameter) input.getParameters().get(paramName);
-                String paramValue = MappingFactory.toString(actualParameter);
-                //TODO: Review this with type
-                if ("URI".equals(actualParameter.getType().getType().toString())) {
-                    ((URIParameterType) actualParameter.getType()).setValue(stageInputFiles(jobExecutionContext, paramValue));
-                } else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
-                    List<String> newFiles = new ArrayList<String>();
-                    for (String paramValueEach : split) {
-                        String stageInputFiles = stageInputFiles(jobExecutionContext, paramValueEach);
-                        detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
-                        status.setTransferState(TransferState.UPLOAD);
-                        detail.setTransferStatus(status);
-                        registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-                   
-                        newFiles.add(stageInputFiles);
-                    }
-                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
-                }
-                inputNew.getParameters().put(paramName, actualParameter);
-               
-            }
-        } catch (Exception e) {
-        	 try {
-         	    status.setTransferState(TransferState.FAILED);
- 				detail.setTransferStatus(status);
- 				registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
- 				GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
- 			} catch (Exception e1) {
-  			    throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
-  		   }
-            log.error(e.getMessage());
-            throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
-        }
-        jobExecutionContext.setInMessageContext(inputNew);
-    }
-
-    private static String stageInputFiles(JobExecutionContext jobExecutionContext, String paramValue) throws URISyntaxException, SecurityException, ToolsException, IOException,GFacException, ApplicationSettingsException {
-        URI gridftpURL = new URI(paramValue);
-
-        String[] gridFTPEndpointArray = null;
-
-        // do not download http/gsiftp inputs into the input directory;
-        // only local (file://) inputs are staged in
-        boolean isInputNonLocal = true;
-
-        //TODO: why it is tightly coupled with gridftp
-//        GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType();
-
-        //TODO: make it more reusable
-        HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
-
-        if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
-        	gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
-        }
-        else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
-        	gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
-        	isInputNonLocal = false;
-        }
-        else {
-        	//TODO
-        }
-
-
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-        GridFtp ftp = new GridFtp();
-        URI destURI = null;
-        GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-
-        for (String endpoint : gridFTPEndpointArray) {
-            URI inputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getInputDataDirectory());
-            String fileName = new File(gridftpURL.getPath()).getName();
-            fileName = ftp.gridFTPFileExist(inputURI, fileName,gssCred);
-
-            String destLocalPath = inputURI.getPath() + File.separator + fileName;
-            // if the URL merely refers to an endpoint (empty file name), no transfer is done
-            if (fileName != null && !"".equals(fileName)) {
-                destURI = GramProviderUtils.createGsiftpURI(endpoint, destLocalPath);
-                if (paramValue.startsWith("gsiftp")) {
-                	// skip for UNICORE: it downloads the input into the job space directory on the user's behalf
-                	if(isInputNonLocal) ftp.uploadFile(gridftpURL, destURI, gssCred);
-                	else return paramValue;
-                } else if (paramValue.startsWith("file")) {
-                    String localFile = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-                    FileInputStream fis = null;
-                    try {
-                    	fis = new FileInputStream(localFile);
-                    	ftp.uploadFile(destURI, gssCred, fis);
-                    } catch (IOException e) {
-                        throw new GFacException("Unable to create file : " + localFile ,e);
-                    } finally {
-                        if (fis != null) {
-                            fis.close();
-                        }
-                    }
-                } else if (paramValue.startsWith("http")) {
-                	// no need to do if it is unicore
-                	if(isInputNonLocal) {
-                		InputStream is = null;
-                		try {
-                			is = gridftpURL.toURL().openStream();
-                			ftp.uploadFile(destURI, gssCred, is);
-                		} finally {
-                			if (is != null) {
-                				is.close();
-                			}
-                		}
-                	} else {
-                		// don't return destUri
-                		return paramValue;
-                	}
-
-                } else {
-                    //todo throw exception telling unsupported protocol
-                    return paramValue;
-                }
-            } else {
-                // When the given input is a URI value rather than a web resource, no transfer is done; the value is kept as it is in the input
-                return paramValue;
-            }
-        }
-        return destURI.getPath();
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-
-
-}

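stageInputFiles above branches on the input URL's scheme: gsiftp inputs are third-party transferred, file inputs are streamed up from local disk, http inputs are pulled and re-uploaded, and anything else passes through untouched. A scheme-dispatch skeleton of that decision, with the transfer calls stubbed out; names and bodies here are illustrative, not the handler's actual helpers:

    import java.net.URI;

    public class StagingDispatchSketch {
        // mirrors the branching in stageInputFiles; the transfer bodies are stubs
        static String stage(String paramValue, boolean inputNonLocal) throws Exception {
            URI url = new URI(paramValue);
            String scheme = url.getScheme() == null ? "" : url.getScheme();
            switch (scheme) {
                case "gsiftp":
                    // third-party GridFTP transfer, unless the target middleware
                    // (e.g. UNICORE) stages inputs itself
                    return inputNonLocal ? transferToInputDir(url) : paramValue;
                case "file":
                    return uploadLocalFile(url);   // stream the local file up
                case "http":
                    return inputNonLocal ? downloadAndUpload(url) : paramValue;
                default:
                    return paramValue;             // unsupported scheme: pass through
            }
        }

        static String transferToInputDir(URI u) { return u.getPath(); }
        static String uploadLocalFile(URI u)    { return u.getPath(); }
        static String downloadAndUpload(URI u)  { return u.getPath(); }
    }
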
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
deleted file mode 100644
index 850608f..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
+++ /dev/null
@@ -1,343 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gram.handler;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.*;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.gfac.core.utils.OutputUtils;
-import org.apache.airavata.gfac.gram.security.GSISecurityContext;
-import org.apache.airavata.gfac.gram.external.GridFtp;
-import org.apache.airavata.gfac.gram.util.GramProviderUtils;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
-import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.model.workspace.experiment.TransferState;
-import org.apache.airavata.model.workspace.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.registry.cpi.Registry;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
-import org.apache.airavata.schemas.gfac.StringArrayType;
-import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-public class GridFTPOutputHandler extends AbstractHandler {
-    private static final Logger log = LoggerFactory.getLogger(GridFTPOutputHandler.class);
-    private Registry registry;
-
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-       log.info("Invoking GridFTPOutputHandler ...");
-       super.invoke(jobExecutionContext);
-        
-       ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-
- 	   HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
- 	   String[] gridFTPEndpointArray = null;
- 	   String hostName = null;
- 
-       if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
-        	gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
-        	hostName = ((GlobusHostType) hostType).getHostName();
- 
-       }
-       else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
-        	gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
-        	hostName = ((UnicoreHostType) hostType).getHostName();
-       }
-       else {
-        	//TODO
-       }
-
-       GridFtp ftp = new GridFtp();
-       File localStdErrFile = null;
-       Map<String, ActualParameter> stringMap = new HashMap<String, ActualParameter>();
-	   DataTransferDetails detail = new DataTransferDetails();
-	   TransferStatus status = new TransferStatus();
-
-       try {
-    	    GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-    	    String[] hostgridFTP = gridFTPEndpointArray;
-            if (hostgridFTP == null || hostgridFTP.length == 0) {
-                hostgridFTP = new String[]{hostName};
-            }
-            for (String endpoint : gridFTPEndpointArray) {
-                try {
-                    /*
-                     *  Read stdout and stderr
-                     */
-					URI stdoutURI = GramProviderUtils.createGsiftpURI(endpoint, app.getStandardOutput());
-                    URI stderrURI = GramProviderUtils.createGsiftpURI(endpoint, app.getStandardError());
-                	status.setTransferState(TransferState.COMPLETE);
-					detail.setTransferStatus(status);
-					detail.setTransferDescription("STDOUT:" + stdoutURI.toString());
-                    registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-                    status.setTransferState(TransferState.COMPLETE);
-					detail.setTransferStatus(status);
-					detail.setTransferDescription("STDERR:" + stderrURI.toString());
-                    registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-                  
-                    log.info("STDOUT:" + stdoutURI.toString());
-                    log.info("STDERR:" + stderrURI.toString());
-
-                    File logDir = new File("./service_logs");
-                    if (!logDir.exists()) {
-                        logDir.mkdir();
-                    }
-
-                    String timeStampedServiceName = GFacUtils.createUniqueNameWithDate(jobExecutionContext
-                            .getApplicationName());
-                    File localStdOutFile = File.createTempFile(timeStampedServiceName, "stdout");
-                    localStdErrFile = File.createTempFile(timeStampedServiceName, "stderr");
-
-
-                    String stdout = null;
-                    String stderr = null;
-
-                    // TODO: what if the job failed and this handler
-                    // cannot find the std* files?
-                    try {
-                     stdout = ftp.readRemoteFile(stdoutURI, gssCred, localStdOutFile);
-                     stderr = ftp.readRemoteFile(stderrURI, gssCred, localStdErrFile);
-                     // TODO: do we also need to set them as output parameters for another job?
-                     ApplicationDescription application = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-                     ApplicationDeploymentDescriptionType appDesc = application.getType();
-                     appDesc.setStandardOutput(stdout);
-                     appDesc.setStandardError(stderr);
-                     jobExecutionContext.getApplicationContext().setApplicationDeploymentDescription(application);
-                    }
-                    catch(ToolsException e) {
-                        log.error("Cannot download stdout/stderr files. One reason could be that the job did not finish successfully: " + e.getMessage());
-                    }
-
-                    List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
-                    Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-                    Set<String> keys = output.keySet();
-                    for (String paramName : keys) {
-                        ActualParameter actualParameter = (ActualParameter) output.get(paramName);
-                        if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-                            URI outputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
-                            List<String> outputList = ftp.listDir(outputURI, gssCred);
-                            String[] valueList = outputList.toArray(new String[outputList.size()]);
-                            ((URIArrayType) actualParameter.getType()).setValueArray(valueList);
-                            stringMap.put(paramName, actualParameter);
-                        }else if ("StringArray".equals(actualParameter.getType().getType().toString())) {
-                            String[] valueList = OutputUtils.parseStdoutArray(stdout, paramName);
-                            ((StringArrayType) actualParameter.getType()).setValueArray(valueList);
-                            stringMap.put(paramName, actualParameter);
-                        } else if ("URI".equals(actualParameter.getType().getType().toString())) {
-                        	  URI outputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
-                              List<String> outputList = ftp.listDir(outputURI, gssCred);
-							if (outputList.size() == 0 || outputList.get(0).isEmpty()) {
-								OutputUtils.fillOutputFromStdout(output, stdout, stderr,outputArray);
-							} else {
-								String valueList = outputList.get(0);
-								((URIParameterType) actualParameter.getType()).setValue(valueList);
-								stringMap = new HashMap<String, ActualParameter>();
-								stringMap.put(paramName, actualParameter);
-							}
-                        }
-                        else {
-                            // This handles exceptions raised during the output parsing.
-                            OutputUtils.fillOutputFromStdout(output, stdout, stderr,outputArray);
-                        }
-                        status.setTransferState(TransferState.DOWNLOAD);
-    					detail.setTransferStatus(status);
-    					detail.setTransferDescription("Output: " + stringMap.get(paramName).toString());
-                        registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-                      
-                    }
-                    if (outputArray == null || outputArray.isEmpty()) {
-                        throw new GFacHandlerException("Empty output returned from the application. Double-check the application " +
-                                "and ApplicationDescriptor output parameter names");
-                    }
-                    // If the user has given an output data path, download the output files to the machine where GFac is installed
-                    TaskDetails taskData =  jobExecutionContext.getTaskData();
-                    if(taskData != null && taskData.getAdvancedOutputDataHandling() != null){
-                    	String outputDataDirectory = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
-                            if(outputDataDirectory != null && !"".equals(outputDataDirectory)){
-                                stageOutputFiles(jobExecutionContext,outputDataDirectory);
-                            }
-                    }
-                } catch (ToolsException e) {
-                    log.error(e.getMessage());
-                    throw new GFacHandlerException(e.getMessage() + "\n StdError Data: \n" +readLastLinesofStdOut(localStdErrFile.getPath(), 20),e);
-                } catch (URISyntaxException e) {
-                    log.error(e.getMessage());
-                    String errTail = localStdErrFile == null ? "" : readLastLinesofStdOut(localStdErrFile.getPath(), 20);
-                    throw new GFacHandlerException("URI is malformed: " + e.getMessage(), e, errTail);
-                }
-            }
-        } catch (Exception e) {
-        	 try {
-        	    status.setTransferState(TransferState.FAILED);
-				detail.setTransferStatus(status);
-				registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-				GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
-	 		} catch (Exception e1) {
- 			    throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
- 		   }
-        	log.error(e.getMessage());
-            // localStdErrFile may not have been created yet if the failure happened early
-            String stdErrTail = localStdErrFile == null ? "" : readLastLinesofStdOut(localStdErrFile.getPath(), 20);
-            throw new GFacHandlerException(e.getMessage(), e, stdErrTail);
-        }
-
-    }
-
-    private static String readLastLinesofStdOut(String path, int count) {
-        StringBuilder buffer = new StringBuilder();
-        List<String> strLine = new ArrayList<String>();
-        BufferedReader br = null;
-        try {
-            br = new BufferedReader(new InputStreamReader(new FileInputStream(path)));
-            String tmp;
-            while ((tmp = br.readLine()) != null) {
-                strLine.add(tmp);
-            }
-        } catch (IOException e) {
-            // Covers FileNotFoundException as well; an unreadable file yields an empty tail.
-            log.error("Error reading stdout/stderr file " + path, e);
-        } finally {
-            if (br != null) {
-                try {
-                    br.close();
-                } catch (IOException e) {
-                    log.error("Error closing stream", e);
-                }
-            }
-        }
-        int numberOfLines = strLine.size();
-        int start = numberOfLines > count ? numberOfLines - count : 0;
-        for (int i = start; i < numberOfLines; i++) {
-            buffer.append(strLine.get(i));
-            buffer.append("\n");
-        }
-        return buffer.toString();
-    }
-
-    private static void stageOutputFiles(JobExecutionContext jobExecutionContext, String outputFileStagingPath) throws GFacProviderException,GFacException, ApplicationSettingsException {
-
-
-    	   HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
-    	   String[] gridFTPEndpointArray = null;
-
-           if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
-           	gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
-           }
-           else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
-           	gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
-           }
-           else {
-           	//TODO
-           }
-
-
-        MessageContext outputNew = new MessageContext();
-        MessageContext output = jobExecutionContext.getOutMessageContext();
-        Map<String, Object> parameters = output.getParameters();
-        for (String paramName : parameters.keySet()) {
-            ActualParameter actualParameter = (ActualParameter) parameters
-                    .get(paramName);
-
-            GridFtp ftp = new GridFtp();
-            GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-            try {
-                if ("URI".equals(actualParameter.getType().getType().toString())) {
-                    for (String endpoint : gridFTPEndpointArray) {
-                        ((URIParameterType) actualParameter.getType()).setValue(doStaging(outputFileStagingPath,
-                                MappingFactory.toString(actualParameter), ftp, gssCred, endpoint));
-                    }
-                } else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(MappingFactory.toString(actualParameter)));
-                    List<String> newFiles = new ArrayList<String>();
-                    for (String endpoint : gridFTPEndpointArray) {
-                        for (String paramValueEach : split) {
-                            newFiles.add(doStaging(outputFileStagingPath, paramValueEach, ftp, gssCred, endpoint));
-                        }
-                        ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
-                    }
-
-                }
-            } catch (URISyntaxException e) {
-                log.error(e.getMessage());
-                throw new GFacProviderException(e.getMessage(), e);
-            } catch (ToolsException e) {
-                log.error(e.getMessage());
-                throw new GFacProviderException(e.getMessage(), e);
-            }
-            outputNew.getParameters().put(paramName, actualParameter);
-        }
-        jobExecutionContext.setOutMessageContext(outputNew);
-    }
-
-    private static String doStaging(String outputFileStagingPath, String paramValue, GridFtp ftp, GSSCredential gssCred, String endpoint) throws URISyntaxException, ToolsException {
-        URI srcURI = GramProviderUtils.createGsiftpURI(endpoint, paramValue);
-        String fileName = new File(srcURI.getPath()).getName();
-        File outputpath = new File(outputFileStagingPath);
-        if(!outputpath.exists()){
-        	outputpath.mkdirs();
-        }
-        File outputFile = new File(outputpath.getAbsolutePath() + File.separator + fileName);
-        ftp.readRemoteFile(srcURI,
-                gssCred, outputFile);
-        return outputFileStagingPath + File.separator + fileName;
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
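
For reference, the removed readLastLinesofStdOut buffered every line of the file
before trimming. A bounded-window tail read of the same kind, as a minimal sketch
(TailReader and readTail are illustrative names, not Airavata APIs):

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;
    import java.util.ArrayDeque;
    import java.util.Deque;

    final class TailReader {
        /** Returns the last count lines of the file at path, newline-terminated. */
        static String readTail(String path, int count) throws IOException {
            if (count <= 0) {
                return "";
            }
            Deque<String> tail = new ArrayDeque<String>(count);
            try (BufferedReader br = new BufferedReader(new FileReader(path))) {
                String line;
                while ((line = br.readLine()) != null) {
                    if (tail.size() == count) {
                        tail.removeFirst(); // drop the oldest line once the window is full
                    }
                    tail.addLast(line);
                }
            }
            StringBuilder sb = new StringBuilder();
            for (String l : tail) {
                sb.append(l).append('\n');
            }
            return sb.toString();
        }
    }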

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/persistence/DBJobPersistenceManager.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/persistence/DBJobPersistenceManager.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/persistence/DBJobPersistenceManager.java
deleted file mode 100644
index 67ba1a5..0000000
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/persistence/DBJobPersistenceManager.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.gram.persistence;
-
-import org.apache.airavata.common.utils.DBUtil;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.persistence.JobData;
-import org.apache.airavata.gfac.core.persistence.JobPersistenceManager;
-import org.apache.log4j.Logger;
-import org.globus.gram.internal.GRAMConstants;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 6/18/13
- * Time: 4:16 PM
- * Database-based job persistence manager; the current default implementation.
- */
-
-public class DBJobPersistenceManager implements JobPersistenceManager {
-
-    private DBUtil dbUtil;
-
-    private static final Logger log = Logger.getLogger(DBJobPersistenceManager.class);
-
-
-    public DBJobPersistenceManager(DBUtil db) {
-        this.dbUtil = db;
-    }
-
-    public synchronized void updateJobStatus(JobData jobData) throws GFacException {
-
-        if (jobData.getState() == GRAMConstants.STATUS_UNSUBMITTED) {
-            insertJob(jobData);
-        } else {
-
-            String sql = "update gram_job set status = ? where job_id = ?";
-
-            Connection connection = null;
-            PreparedStatement stmt = null;
-
-            try {
-                connection = getConnection();
-                stmt = connection.prepareStatement(sql);
-                stmt.setInt(1, jobData.getState());
-                stmt.setString(2, jobData.getJobId());
-
-                stmt.executeUpdate();
-                connection.commit();
-
-            } catch (SQLException e) {
-                throw new GFacException(e);
-            } finally {
-                try {
-                    if (stmt != null) {
-                        stmt.close();
-                    }
-
-                    if (connection != null) {
-                        connection.close();
-                    }
-
-                } catch (SQLException e) {
-                    log.error("Error closing streams", e);
-                }
-            }
-        }
-    }
-
-    private void insertJob(JobData jobData) throws GFacException {
-
-        String sql = "insert into gram_job values (?, ?)";
-
-        PreparedStatement stmt = null;
-        Connection connection = null;
-
-        try {
-            connection = getConnection();
-            stmt = connection.prepareStatement(sql);
-            stmt.setString(1, jobData.getJobId());
-            stmt.setInt(2, jobData.getState());
-
-            stmt.executeUpdate();
-        } catch (SQLException e) {
-            throw new GFacException(e);
-        } finally {
-            try {
-                if (stmt != null) {
-                    stmt.close();
-                }
-
-                if (connection != null) {
-                    connection.close();
-                }
-
-            } catch (SQLException e) {
-                log.error("Error closing streams", e);
-            }
-        }
-
-    }
-
-    public List<JobData> getRunningJobs() throws GFacException {
-
-        String sql = "select * from gram_job where status not in (?, ?, ?)";
-
-        int[] statuses = new int[3];
-        statuses[0] = GRAMConstants.STATUS_UNSUBMITTED;
-        statuses[1] = GRAMConstants.STATUS_DONE;
-        statuses[2] = GRAMConstants.STATUS_FAILED;
-
-        return getJobs(sql, statuses);
-    }
-
-    public List<JobData> getFailedJobs() throws GFacException {
-
-        String sql = "select * from gram_job where status in (?)";
-
-        int[] statuses = new int[1];
-        statuses[0] = GRAMConstants.STATUS_FAILED;
-
-        return getJobs(sql, statuses);
-    }
-
-    public List<JobData> getUnSubmittedJobs() throws GFacException {
-
-        String sql = "select * from gram_job where status in (?)";
-
-        int[] statuses = new int[1];
-        statuses[0] = GRAMConstants.STATUS_UNSUBMITTED;
-
-        return getJobs(sql, statuses);
-    }
-
-    public List<JobData> getSuccessfullyCompletedJobs() throws GFacException {
-
-        String sql = "select * from gram_job where status in (?)";
-
-        int[] statuses = new int[1];
-        statuses[0] = GRAMConstants.STATUS_DONE;
-
-        return getJobs(sql, statuses);
-
-    }
-
-
-    protected List<JobData> getJobs(String sql, int[] statuses) throws GFacException {
-
-        List<JobData> jobs = new ArrayList<JobData>();
-
-        PreparedStatement preparedStatement = null;
-        Connection connection = null;
-
-        try {
-            connection = getConnection();
-            preparedStatement = connection.prepareStatement(sql);
-
-            int index = 1;
-            for (int status : statuses) {
-                preparedStatement.setInt(index, status);
-                ++index;
-            }
-
-            ResultSet resultSet = preparedStatement.executeQuery();
-
-            while (resultSet.next()) {
-
-                String jobId = resultSet.getString("job_id");
-                int state = resultSet.getInt("status");
-
-                jobs.add(new JobData(jobId, state));
-            }
-
-        } catch (SQLException e) {
-            throw new GFacException(e);
-        } finally {
-            try {
-                if (preparedStatement != null) {
-                    preparedStatement.close();
-                }
-
-                if (connection != null) {
-                    connection.close();
-                }
-
-            } catch (SQLException e) {
-                log.error("Error closing connection", e);
-            }
-        }
-
-        return jobs;
-    }
-
-    private synchronized Connection getConnection() throws SQLException {
-        Connection connection = dbUtil.getConnection();
-        connection.setAutoCommit(true);
-
-        return connection;
-    }
-}
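
A usage sketch for the manager above, assuming a configured DBUtil (dbUtil) and
the gram_job (job_id, status) table targeted by the SQL; the job id is a
placeholder and exception handling is elided. STATUS_ACTIVE is the standard GRAM
active state from the same GRAMConstants interface:

    DBJobPersistenceManager manager = new DBJobPersistenceManager(dbUtil);

    // State UNSUBMITTED triggers an insert; any other state updates the row.
    manager.updateJobStatus(new JobData("gram-job-42", GRAMConstants.STATUS_UNSUBMITTED));
    manager.updateJobStatus(new JobData("gram-job-42", GRAMConstants.STATUS_ACTIVE));

    // Everything not UNSUBMITTED, DONE or FAILED counts as running.
    List<JobData> running = manager.getRunningJobs();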


[4/4] airavata git commit: removed gfac-ec2, gfac-gram and gfac-hadoop modules from source.

Posted by sh...@apache.org.
removed gfac-ec2, gfac-gram and gfac-hadoop modules from source.


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/70239916
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/70239916
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/70239916

Branch: refs/heads/master
Commit: 70239916903505c8ec195108eed708f804d720b7
Parents: 742edee
Author: shamrath <sh...@gmail.com>
Authored: Fri May 8 11:54:58 2015 -0400
Committer: shamrath <sh...@gmail.com>
Committed: Fri May 8 11:54:58 2015 -0400

----------------------------------------------------------------------
 modules/gfac/gfac-ec2/pom.xml                   | 135 -----
 .../gfac/ec2/AmazonInstanceScheduler.java       | 233 --------
 .../gfac/ec2/AmazonSecurityContext.java         |  80 ---
 .../apache/airavata/gfac/ec2/AmazonUtil.java    | 142 -----
 .../apache/airavata/gfac/ec2/EC2Provider.java   | 365 ------------
 .../airavata/gfac/ec2/EC2ProviderEvent.java     |  37 --
 .../airavata/gfac/ec2/GreedyScheduler.java      |  92 ---
 .../airavata/gfac/ec2/SchedulingAlgorithm.java  |  36 --
 .../airavata/gfac/ec2/util/AmazonEC2Util.java   | 118 ----
 .../airavata/gfac/ec2/util/EC2ProviderUtil.java | 173 ------
 .../src/main/resources/errors.properties        | 197 -------
 .../src/main/resources/service.properties       |  57 --
 .../airavata/gfac/ec2/EC2ProviderTest.java      | 195 -------
 .../gfac/gfac-ec2/src/test/resources/echo.bat   |  22 -
 .../src/test/resources/logging.properties       |  42 --
 .../src/test/resources/service.properties       |  67 ---
 modules/gfac/gfac-gram/pom.xml                  | 124 -----
 .../airavata/gfac/gram/external/GridFtp.java    | 558 -------------------
 .../gram/handler/GramDirectorySetupHandler.java | 139 -----
 .../gfac/gram/handler/GridFTPInputHandler.java  | 203 -------
 .../gfac/gram/handler/GridFTPOutputHandler.java | 343 ------------
 .../persistence/DBJobPersistenceManager.java    | 225 --------
 .../gfac/gram/provider/impl/GramProvider.java   | 539 ------------------
 .../gfac/gram/security/GSISecurityContext.java  | 275 ---------
 .../gram/util/GramJobSubmissionListener.java    | 141 -----
 .../gfac/gram/util/GramProviderUtils.java       | 113 ----
 .../gfac/gram/util/GramRSLGenerator.java        | 211 -------
 .../gfac/gram/util/GridFTPContactInfo.java      |  61 --
 .../src/main/resources/errors.properties        | 197 -------
 .../src/main/resources/service.properties       |  58 --
 .../impl/GFacBaseTestWithMyProxyAuth.java       | 115 ----
 .../impl/GramProviderTestWithMyProxyAuth.java   | 225 --------
 .../src/test/resources/PBSTemplate.xslt         |  73 ---
 .../src/test/resources/logging.properties       |  42 --
 modules/gfac/gfac-hadoop/pom.xml                | 116 ----
 .../hadoop/handler/HDFSDataMovementHandler.java | 103 ----
 .../hadoop/handler/HadoopDeploymentHandler.java | 276 ---------
 .../hadoop/provider/impl/HadoopProvider.java    | 154 -----
 .../gfac/hadoop/provider/utils/HadoopUtils.java |  60 --
 .../src/main/resources/errors.properties        | 197 -------
 .../src/main/resources/service.properties       |  58 --
 .../src/test/resources/PBSTemplate.xslt         |  73 ---
 .../src/test/resources/logging.properties       |  42 --
 modules/gfac/pom.xml                            |   3 -
 modules/workflow-model/workflow-engine/pom.xml  |   4 +-
 modules/xbaya-gui/pom.xml                       |   4 +-
 .../airavata/xbaya/XBayaConfiguration.java      |  18 -
 .../dialogs/amazon/ChangeCredentialWindow.java  |  19 +-
 48 files changed, 11 insertions(+), 6749 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/pom.xml b/modules/gfac/gfac-ec2/pom.xml
deleted file mode 100644
index 4568a11..0000000
--- a/modules/gfac/gfac-ec2/pom.xml
+++ /dev/null
@@ -1,135 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor 
-	license agreements. See the NOTICE file distributed with this work for additional 
-	information regarding copyright ownership. The ASF licenses this file to 
-	you under the Apache License, Version 2.0 (the "License"); you may not use 
-	this file except in compliance with the License. You may obtain a copy of 
-	the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
-	by applicable law or agreed to in writing, software distributed under the 
-	License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
-	OF ANY KIND, either express or implied. See the License for the specific 
-	language governing permissions and limitations under the License. -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<parent>
-		<groupId>org.apache.airavata</groupId>
-		<artifactId>gfac</artifactId>
-		<version>0.15-SNAPSHOT</version>
-		<relativePath>../pom.xml</relativePath>
-	</parent>
-
-	<modelVersion>4.0.0</modelVersion>
-	<artifactId>airavata-gfac-ec2</artifactId>
-	<name>Airavata GFac EC2 Implementation</name>
-	<description>The core GFAC EC2 implementation using the framework features</description>
-	<url>http://airavata.apache.org/</url>
-
-	<dependencies>
-		<dependency>
-			<groupId>org.apache.airavata</groupId>
-			<artifactId>airavata-gfac-core</artifactId>
-			<version>${project.version}</version>
-		</dependency>
-
-		<dependency>
-			<groupId>commons-configuration</groupId>
-			<artifactId>commons-configuration</artifactId>
-			<version>1.6</version>
-		</dependency>
-
-		<!-- Logging -->
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-api</artifactId>
-		</dependency>
-
-		<!-- GFAC schemas -->
-		<dependency>
-			<groupId>org.apache.airavata</groupId>
-			<artifactId>airavata-workflow-execution-context</artifactId>
-			<version>${project.version}</version>
-		</dependency>
-		<!-- Workflow Tracking -->
-		<!--<dependency>-->
-			<!--<groupId>org.apache.airavata</groupId>-->
-			<!--<artifactId>airavata-workflow-tracking</artifactId>-->
-			<!--<version>${project.version}</version>-->
-		<!--</dependency>-->
-
-		<!-- SSH -->
-		<dependency>
-			<groupId>net.schmizz</groupId>
-			<artifactId>sshj</artifactId>
-			<version>0.8.0</version>
-		</dependency>
-
-		<!-- Credential Store -->
-		<dependency>
-			<groupId>org.apache.airavata</groupId>
-			<artifactId>airavata-credential-store</artifactId>
-			<version>${project.version}</version>
-		</dependency>
-
-		<!-- Amazon EC2 Provider -->
-		<dependency>
-			<groupId>com.amazonaws</groupId>
-			<artifactId>aws-java-sdk</artifactId>
-			<version>1.3.20</version>
-		</dependency>
-		<dependency>
-			<groupId>sshtools</groupId>
-			<artifactId>j2ssh-core</artifactId>
-			<version>0.2.9</version>
-		</dependency>
-		<dependency>
-			<groupId>sshtools</groupId>
-			<artifactId>j2ssh-common</artifactId>
-			<version>0.2.9</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.httpcomponents</groupId>
-			<artifactId>httpclient</artifactId>
-			<version>4.3</version>
-			<type>jar</type>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.httpcomponents</groupId>
-			<artifactId>httpcore</artifactId>
-			<version>4.3</version>
-			<type>jar</type>
-		</dependency>
-
-		<!-- Test -->
-		<dependency>
-			<groupId>junit</groupId>
-			<artifactId>junit</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.testng</groupId>
-			<artifactId>testng</artifactId>
-			<version>6.1.1</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>jcl-over-slf4j</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-log4j12</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.airavata</groupId>
-			<artifactId>airavata-client-configuration</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.bouncycastle</groupId>
-			<artifactId>bcpkix-jdk15on</artifactId>
-		</dependency>
-	</dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonInstanceScheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonInstanceScheduler.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonInstanceScheduler.java
deleted file mode 100644
index 9dd13dc..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonInstanceScheduler.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.cloudwatch.AmazonCloudWatchClient;
-import com.amazonaws.services.cloudwatch.model.Datapoint;
-import com.amazonaws.services.cloudwatch.model.Dimension;
-import com.amazonaws.services.cloudwatch.model.GetMetricStatisticsRequest;
-import com.amazonaws.services.cloudwatch.model.GetMetricStatisticsResult;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.DescribeInstancesResult;
-import com.amazonaws.services.ec2.model.Instance;
-import com.amazonaws.services.ec2.model.Reservation;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
-public class AmazonInstanceScheduler {
-    private static final Logger log = LoggerFactory.getLogger(AmazonInstanceScheduler.class);
-
-    /* Maximum number of instances that the Scheduler will create*/
-    //private static final int MAX_INSTANCE_COUNT = 3;
-
-    /* Maximum number of minutes an instance should be kept alive*/
-    public static final int INSTANCE_UP_TIME_THRESHOLD = 60;
-
-    private static volatile AmazonInstanceScheduler scheduler = null;
-
-    private static String imageId = null;
-
-    private static AWSCredentials credential = null;
-
-    private static AmazonEC2Client ec2client = null;
-
-    /* The time interval (in minutes) at which instances are checked for time-out */
-    public static final long TERMINATE_THREAD_UPDATE_INTERVAL = 5;
-
-    public static AmazonInstanceScheduler getInstance(String imageId, String accessKey, String secretKey)
-            throws IOException, InvalidKeySpecException, NoSuchAlgorithmException {
-
-        if(scheduler == null) {
-            synchronized (AmazonInstanceScheduler.class) {
-                if(scheduler == null) {
-                    new Thread() {
-                        @Override
-                        public void run() {
-                            //noinspection InfiniteLoopStatement
-                            while(true) {
-                                try {
-                                    Thread.sleep(TERMINATE_THREAD_UPDATE_INTERVAL * 60 * 1000);
-                                } catch (InterruptedException e ) {
-                                    // do-nothing
-                                }
-
-                                try {
-                                    terminateTimedOutAmazonInstances();
-                                } catch (Throwable e) {
-                                    log.error(e.getMessage(), e);
-                                }
-                            }
-
-                        }
-
-                    }.start();
-
-                    scheduler = new AmazonInstanceScheduler();
-                }
-            }
-        }
-
-        AmazonInstanceScheduler.imageId = imageId;
-        AmazonInstanceScheduler.credential = new BasicAWSCredentials(accessKey, secretKey);
-        AmazonInstanceScheduler.ec2client = new AmazonEC2Client(credential);
-
-        return scheduler;
-    }
-
-
-    /**
-     * Returns the id of the running Amazon instance with the minimum CPU
-     * utilization. If even that instance's CPU utilization exceeds 80%, the
-     * AMI id is returned instead of an instance id. If a particular running
-     * instance's uptime is greater than 55 minutes, that instance will be shut down.
-     *
-     * @return instance id
-     * @throws NoSuchAlgorithmException
-     * @throws InvalidKeySpecException
-     * @throws IOException
-     */
-    public String getScheduledAmazonInstance()
-            throws NoSuchAlgorithmException, InvalidKeySpecException, IOException {
-
-        SchedulingAlgorithm greedyAlgo = new GreedyScheduler();
-        return greedyAlgo.getScheduledAmazonInstance(ec2client, imageId, credential);
-    }
-
-    /**
-     * Terminates the Amazon instances that are timed out, i.e. instances that
-     * have been running for longer than INSTANCE_UP_TIME_THRESHOLD minutes.
-     */
-    private static void terminateTimedOutAmazonInstances(){
-        log.info("Checking for timed-out instances");
-        List<Instance> instanceList = loadInstances(ec2client);
-        for (Instance instance : instanceList) {
-            String instanceId = instance.getInstanceId();
-
-            long upTime = getInstanceUptime(instance);
-            // if the instance up time is greater than the threshold, terminate the instance
-            if (upTime > INSTANCE_UP_TIME_THRESHOLD) {
-                List<String> requestIds = new ArrayList<String>();
-                requestIds.add(instanceId);
-                // terminate instance
-                log.info("Terminating instance " + instanceId +
-                        " as the up-time threshold is exceeded");
-                AmazonUtil.terminateInstances(requestIds);
-            }
-        }
-
-    }
-
-    /**
-     * Calculates the instance up time in minutes.
-     *
-     * @param instance instance to be monitored.
-     * @return up time of the instance.
-     */
-    private static long getInstanceUptime(Instance instance) {
-        Date startTime = instance.getLaunchTime();
-        Date today = new Date();
-        long diff = (today.getTime() - startTime.getTime()) / (1000 * 60);
-        log.info("Instance launch time   : " + startTime);
-        log.info("Instance up time (mins): " + diff);
-        return diff;
-    }
-
-    /**
-     * Monitors the CPU Utilization using Amazon Cloud Watch. In order to monitor the instance, Cloud Watch Monitoring
-     * should be enabled for the running instance.
-     *
-     * @param credential EC2 credentials
-     * @param instanceId instance id
-     * @return average CPU utilization of the instance
-     */
-    public static double monitorInstance(AWSCredentials credential, String instanceId) {
-        try {
-            AmazonCloudWatchClient cw = new AmazonCloudWatchClient(credential) ;
-
-            long offsetInMilliseconds = 1000 * 60 * 60 * 24;
-            GetMetricStatisticsRequest request = new GetMetricStatisticsRequest()
-                    .withStartTime(new Date(new Date().getTime() - offsetInMilliseconds))
-                    .withNamespace("AWS/EC2")
-                    .withPeriod(60 * 60)
-                    .withDimensions(new Dimension().withName("InstanceId").withValue(instanceId))
-                    .withMetricName("CPUUtilization")
-                    .withStatistics("Average", "Maximum")
-                    .withEndTime(new Date());
-            GetMetricStatisticsResult getMetricStatisticsResult = cw.getMetricStatistics(request);
-
-            double avgCPUUtilization = 0;
-            List<Datapoint> datapoints = getMetricStatisticsResult.getDatapoints();
-            for (Datapoint dp : datapoints) {
-                avgCPUUtilization = dp.getAverage();
-                log.info(instanceId + " instance's average CPU utilization : " + dp.getAverage());
-            }
-
-            return avgCPUUtilization;
-
-        } catch (AmazonServiceException ase) {
-            log.error("Caught an AmazonServiceException, which means the request was made "
-                    + "to Amazon EC2, but was rejected with an error response for some reason.");
-            log.error("Error Message:    " + ase.getMessage());
-            log.error("HTTP Status Code: " + ase.getStatusCode());
-            log.error("AWS Error Code:   " + ase.getErrorCode());
-            log.error("Error Type:       " + ase.getErrorType());
-            log.error("Request ID:       " + ase.getRequestId());
-
-        }
-        return 0;
-    }
-
-    /**
-     * Load instances associated with the given ec2 client
-     *
-     * @param ec2client ec2 client
-     * @return list of instances
-     */
-    public static List<Instance> loadInstances(AmazonEC2Client ec2client) {
-        List<Instance> resultList = new ArrayList<Instance>();
-        DescribeInstancesResult describeInstancesResult = ec2client.describeInstances();
-        List<Reservation> reservations = describeInstancesResult.getReservations();
-        for (Reservation reservation : reservations) {
-            for (Instance instance : reservation.getInstances()) {
-                log.info("instance       : " + instance);
-                if ("running".equalsIgnoreCase(instance.getState().getName())) {
-                    resultList.add(instance);
-                }
-            }
-        }
-        return resultList;
-    }
-
-}
-
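
A usage sketch for the scheduler above (keys and AMI id are placeholders,
checked-exception handling elided). Per the javadoc, getScheduledAmazonInstance()
hands back either the id of the least-loaded running instance or, when every
instance is above the 80% utilization threshold, the AMI id to launch a new one:

    AmazonInstanceScheduler scheduler =
            AmazonInstanceScheduler.getInstance("ami-xxxxxxxx", accessKey, secretKey);
    String target = scheduler.getScheduledAmazonInstance();
    if (target.startsWith("ami-")) {
        // Every running instance is too busy; launch a fresh one from the AMI.
    } else {
        // target is the instance id with the lowest average CPU utilization.
    }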

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonSecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonSecurityContext.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonSecurityContext.java
deleted file mode 100644
index 75e55ae..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonSecurityContext.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import org.apache.airavata.gfac.SecurityContext;
-
-public class AmazonSecurityContext implements SecurityContext {
-
-	public static final String AMAZON_SECURITY_CONTEXT = "amazon";
-    private String userName;
-    private String accessKey;
-    private String secretKey;
-    private String amiId;
-    private String instanceType;
-    private String instanceId;
-    private boolean isRunningInstance = false;
-
-    public AmazonSecurityContext(String userName, String accessKey, String secretKey, String amiId, String instanceType) {
-        this.userName = userName;
-        this.accessKey = accessKey;
-        this.secretKey = secretKey;
-        this.amiId = amiId;
-        this.instanceType = instanceType;
-    }
-
-    public AmazonSecurityContext(String userName, String accessKey, String secretKey, String instanceId) {
-        this.userName = userName;
-        this.accessKey = accessKey;
-        this.secretKey = secretKey;
-        this.instanceId = instanceId;
-        this.isRunningInstance = true;
-    }
-
-    public String getAccessKey() {
-        return accessKey;
-    }
-
-    public String getSecretKey() {
-        return secretKey;
-    }
-
-    public String getInstanceId() {
-        return instanceId;
-    }
-
-    public String getInstanceType() {
-        return instanceType;
-    }
-
-    public String getAmiId() {
-        return amiId;
-    }
-
-    public boolean isRunningInstance() {
-        return isRunningInstance;
-    }
-
-    public String getUserName() {
-        return userName;
-    }
-}
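
A construction sketch for the two modes above (all values are placeholders); the
EC2Provider later retrieves the context via
getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT):

    // Launch mode: the provider starts a fresh instance of the given type from the AMI.
    AmazonSecurityContext launchCtx =
            new AmazonSecurityContext("ec2-user", accessKey, secretKey, "ami-xxxxxxxx", "t1.micro");

    // Reuse mode: point at an instance that is already up (isRunningInstance() == true).
    AmazonSecurityContext reuseCtx =
            new AmazonSecurityContext("ec2-user", accessKey, secretKey, "i-xxxxxxxx");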

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonUtil.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonUtil.java
deleted file mode 100644
index 7814096..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonUtil.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.*;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class AmazonUtil {
-
-    /* Amazon EC2 instance type */
-    public final static String[] INSTANCE_TYPE =
-            { "t1.micro", "m1.small", "m1.large", "m1.xlarge", "m2.xlarge", "m2.2xlarge",
-                    "m2.4xlarge", "c1.medium", "c1.xlarge" };
-
-    private static AmazonEC2 getEC2Client() {
-        // FIXME : Fix this properly after adding UI components.
-        String accessKey = "";
-        String secretKey = "";
-        AmazonEC2 ec2 = new AmazonEC2Client(new BasicAWSCredentials(accessKey, secretKey));
-        return ec2;
-    }
-
-    /**
-     * Launch a new EC2 instance
-     *
-     * @param amiId AMI id to launch the instance(s) from
-     * @param type EC2 instance type, e.g. one of INSTANCE_TYPE
-     * @param number number of instances to launch
-     * @return list of newly launched instances
-     */
-    public static List<Instance> launchInstance(String amiId, String type, Integer number) {
-        List<Instance> resultList = new ArrayList<Instance>();
-
-        RunInstancesRequest request = new RunInstancesRequest(amiId, number, number);
-        request.setInstanceType(type);
-
-        RunInstancesResult result = getEC2Client().runInstances(request);
-        resultList.addAll(result.getReservation().getInstances());
-        return resultList;
-    }
-
-    /**
-     * Launch a new EC2 instance
-     *
-     * @param amiId AMI id to launch the instance(s) from
-     * @param type EC2 instance type, e.g. one of INSTANCE_TYPE
-     * @param number number of instances to launch
-     * @param keyname name of the EC2 key pair to associate with the launched instances
-     * @return list of newly launched instances
-     */
-    public static List<Instance> launchInstance(String amiId, String type, Integer number, String keyname) {
-        List<Instance> resultList = new ArrayList<Instance>();
-
-        RunInstancesRequest request = new RunInstancesRequest(amiId, number, number);
-        request.setInstanceType(type);
-        request.setKeyName(keyname);
-
-        RunInstancesResult result = getEC2Client().runInstances(request);
-        resultList.addAll(result.getReservation().getInstances());
-        return resultList;
-    }
-
-    /**
-     * Load instances
-     *
-     * @return list of instances
-     */
-    public static List<Instance> loadInstances() {
-        List<Instance> resultList = new ArrayList<Instance>();
-        DescribeInstancesResult describeInstancesResult = getEC2Client().describeInstances();
-        List<Reservation> reservations = describeInstancesResult.getReservations();
-        for (Reservation reservation : reservations) {
-            for (Instance instance : reservation.getInstances()) {
-                resultList.add(instance);
-            }
-        }
-        return resultList;
-    }
-
-    /**
-     * Load key pairs
-     *
-     * @return list of keypairs
-     */
-    public static List<String> loadKeypairs(){
-        List<String> resultList = new ArrayList<String>();
-        DescribeKeyPairsResult results = getEC2Client().describeKeyPairs();
-        for (KeyPairInfo key : results.getKeyPairs()) {
-            resultList.add(key.getKeyName());
-        }
-        return resultList;
-    }
-
-    /**
-     * Terminate instances
-     *
-     * @param instanceIds instance ids of the running instances.
-     */
-    public static void terminateInstances(List<String> instanceIds) {
-        // terminate
-        TerminateInstancesRequest request = new TerminateInstancesRequest(instanceIds);
-        getEC2Client().terminateInstances(request);
-    }
-
-    /**
-     * Terminate instances
-     *
-     * @param instanceIds  instance ids of the running instances.
-     */
-    public static void terminateInstances(String... instanceIds) {
-        // terminate
-        TerminateInstancesRequest request = new TerminateInstancesRequest();
-        getEC2Client().terminateInstances(request.withInstanceIds(instanceIds));
-    }
-
-}
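
A usage sketch for the helper above (AMI id and key pair name are placeholders;
credentials come from the getEC2Client() FIXME): launch two instances, then
terminate them by id:

    List<Instance> launched = AmazonUtil.launchInstance("ami-xxxxxxxx", "m1.small", 2, "ec2_rsa");
    for (Instance instance : launched) {
        System.out.println("launched: " + instance.getInstanceId());
    }
    AmazonUtil.terminateInstances(launched.get(0).getInstanceId(),
            launched.get(1).getInstanceId());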

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
deleted file mode 100644
index 29efb73..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
+++ /dev/null
@@ -1,365 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.*;
-import com.sshtools.j2ssh.SshClient;
-import com.sshtools.j2ssh.authentication.AuthenticationProtocolState;
-import com.sshtools.j2ssh.authentication.PublicKeyAuthenticationClient;
-import com.sshtools.j2ssh.configuration.SshConnectionProperties;
-import com.sshtools.j2ssh.session.SessionChannelClient;
-import com.sshtools.j2ssh.transport.HostKeyVerification;
-import com.sshtools.j2ssh.transport.TransportProtocolException;
-import com.sshtools.j2ssh.transport.publickey.InvalidSshKeyException;
-import com.sshtools.j2ssh.transport.publickey.SshPrivateKey;
-import com.sshtools.j2ssh.transport.publickey.SshPrivateKeyFile;
-import com.sshtools.j2ssh.transport.publickey.SshPublicKey;
-import org.airavata.appcatalog.cpi.AppCatalogException;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.provider.utils.ProviderUtils;
-import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.gfac.ec2.util.AmazonEC2Util;
-import org.apache.airavata.gfac.ec2.util.EC2ProviderUtil;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.appcatalog.computeresource.CloudJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
-import org.apache.airavata.model.appcatalog.computeresource.ProviderName;
-import org.apache.airavata.model.workspace.experiment.JobState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Calendar;
-import java.util.List;
-import java.util.Map;
-
-public class EC2Provider extends AbstractProvider {
-
-    private static final Logger log = LoggerFactory.getLogger(EC2Provider.class);
-
-    public static final int SOCKET_TIMEOUT = 30000;
-
-    public static final int SSH_PORT = 22;
-
-    public static final String KEY_PAIR_NAME = "ec2_rsa";
-
-    private Instance instance = null;
-
-    private AmazonSecurityContext amazonSecurityContext;
-    
-    private String jobId;
-    
-    private String taskID; 
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException{
-        if (jobExecutionContext != null) {
-    		jobId="EC2_"+jobExecutionContext.getHostName()+"_"+Calendar.getInstance().getTimeInMillis();
-            if (jobExecutionContext.getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT)
-                    instanceof AmazonSecurityContext) {
-                this.amazonSecurityContext = (AmazonSecurityContext) jobExecutionContext.
-                        getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT);
-            } else {
-                throw new GFacProviderException("Amazon Security Context is not set: " + jobExecutionContext);
-            }
-        } else {
-            throw new GFacProviderException("Job Execution Context is null");
-        }
-
-        if (log.isDebugEnabled()) {
-            log.debug("ACCESS_KEY:" + amazonSecurityContext.getAccessKey());
-            log.debug("SECRET_KEY:" + amazonSecurityContext.getSecretKey());
-            log.debug("AMI_ID:" + amazonSecurityContext.getAmiId());
-            log.debug("INS_ID:" + amazonSecurityContext.getInstanceId());
-            log.debug("INS_TYPE:" + amazonSecurityContext.getInstanceType());
-            log.debug("USERNAME:" + amazonSecurityContext.getUserName());
-        }
-//        job
-        details.setJobID(jobId);
-        /* Validation */
-        if (amazonSecurityContext.getAccessKey() == null || amazonSecurityContext.getAccessKey().isEmpty())
-            throw new GFacProviderException("EC2 Access Key is empty");
-        if (amazonSecurityContext.getSecretKey() == null || amazonSecurityContext.getSecretKey().isEmpty())
-            throw new GFacProviderException("EC2 Secret Key is empty");
-        if ((amazonSecurityContext.getAmiId() == null && amazonSecurityContext.getInstanceId() == null) ||
-                (amazonSecurityContext.getAmiId() != null && amazonSecurityContext.getAmiId().isEmpty()) ||
-                (amazonSecurityContext.getInstanceId() != null && amazonSecurityContext.getInstanceId().isEmpty()))
-            throw new GFacProviderException("EC2 AMI or Instance ID is empty");
-        if (amazonSecurityContext.getUserName() == null || amazonSecurityContext.getUserName().isEmpty())
-            throw new GFacProviderException("EC2 Username is empty");
-
-        /* Need to start EC2 instance before running it */
-        AWSCredentials credential =
-                new BasicAWSCredentials(amazonSecurityContext.getAccessKey(), amazonSecurityContext.getSecretKey());
-        AmazonEC2Client ec2client = new AmazonEC2Client(credential);
-        taskID = jobExecutionContext.getTaskData().getTaskID();
-		GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SETUP);
-        initEc2Environment(jobExecutionContext, ec2client);
-        checkConnection(instance, ec2client);
-    }
-
-	
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-    
-        String shellCmd = createShellCmd(jobExecutionContext);
-//        AiravataAPI airavataAPI = jobExecutionContext.getGFacConfiguration().getAiravataAPI();
-//        if (airavataAPI!=null){
-//        	try {
-//				airavataAPI.getProvenanceManager().updateApplicationJobData(jobId, shellCmd);
-//			} catch (AiravataAPIInvocationException e) {
-//				log.error("Error in saving EC2 shell command!!!", e);
-//			}
-//        }
-        SshClient sshClient = new SshClient();
-        sshClient.setSocketTimeout(SOCKET_TIMEOUT);
-        SshConnectionProperties properties = new SshConnectionProperties();
-        properties.setHost(this.instance.getPublicDnsName());
-        properties.setPort(SSH_PORT);
-
-        // Connect to the host
-        try
-        {
-            String outParamName;
-            List<OutputDataObjectType> outputs = jobExecutionContext.getApplicationContext().getApplicationInterfaceDescription().getApplicationOutputs();
-            if(outputs != null && !outputs.isEmpty()) {
-                outParamName = outputs.get(0).getName();
-            } else {
-                throw new GFacProviderException("Output parameter name is not set. Therefore, not being able " +
-                        "to filter the job result from standard out ");
-            }
-
-            sshClient.connect(properties, new HostKeyVerification() {
-                public boolean verifyHost(String s, SshPublicKey sshPublicKey) throws TransportProtocolException {
-                    log.debug("Verifying Host: " + s);
-                    return true;
-                }
-            });
-            // Initialize the authentication data.
-            PublicKeyAuthenticationClient publicKeyAuth = new PublicKeyAuthenticationClient();
-            publicKeyAuth.setUsername(amazonSecurityContext.getUserName());
-            SshPrivateKeyFile file = SshPrivateKeyFile.
-                    parse(new File(System.getProperty("user.home") + "/.ssh/" + KEY_PAIR_NAME));
-            SshPrivateKey privateKey = file.toPrivateKey("");
-            publicKeyAuth.setKey(privateKey);
-
-            // Authenticate
-            int result = sshClient.authenticate(publicKeyAuth);
-            if(result== AuthenticationProtocolState.FAILED) {
-            	GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.FAILED);
-                throw new GFacProviderException("The authentication failed");
-            } else if(result==AuthenticationProtocolState.PARTIAL) {
-                throw new GFacProviderException("The authentication succeeded but another"
-                        + "authentication is required");
-            } else if(result==AuthenticationProtocolState.COMPLETE) {
-                log.info("ssh client authentication is complete...");
-            }
-            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SUBMITTED);
-            SessionChannelClient session = sshClient.openSessionChannel();
-            log.info("ssh session successfully opened...");
-            session.requestPseudoTerminal("vt100", 80, 25, 0, 0, "");
-            session.startShell();
-            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.ACTIVE);
-              
-            session.getOutputStream().write(shellCmd.getBytes());
-
-            InputStream in = session.getInputStream();
-            byte[] buffer = new byte[255];
-            int read;
-            String executionResult = "";
-            while((read = in.read(buffer)) > 0) {
-                String out = new String(buffer, 0, read);
-
-                if(out.startsWith(outParamName)) {
-                    executionResult = out.split("=", 2)[1]; // limit 2 keeps any '=' inside the value
-                    log.debug("Result found in the StandardOut ");
-                    break;
-                }
-            }
-         
-            executionResult = executionResult.replace("\r","").replace("\n","");
-            log.info("Result of the job : " + executionResult);
-
-            for(OutputDataObjectType outparamType : outputs){
-                /* Assuming that there is just a single result. If you want to add more results, update the necessary
-                   logic below */
-                String paramName = outparamType.getName();
-                String value = outparamType.getValue();
-                jobExecutionContext.getOutMessageContext().addParameter(paramName, value);
-            }
-            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.COMPLETE);
-        } catch (InvalidSshKeyException e) {
-            throw new GFacProviderException("Invalid SSH key", e);
-        } catch (IOException e) {
-            throw new GFacProviderException("Error in occurred during IO", e);
-        } catch (Exception e) {
-            throw new GFacProviderException("Error parsing standard out for job execution result", e);
-        }
-
-    }
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        // Do nothing
-    }
-
-    public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacException {
-        throw new NotImplementedException();
-    }
-
-    /**
-     * Creates the command to be executed in the remote shell.
-     *
-     * @param jobExecutionContext JobExecutionContext for the cloud job
-     * @return shell command to be executed
-     * @throws GFacProviderException GFacProviderException
-     */
-    private String createShellCmd(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        try {
-            String command = "";
-            JobSubmissionInterface submissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
-            CloudJobSubmission cloudJobSubmission = GFacUtils.getCloudJobSubmission(submissionInterface.getJobSubmissionInterfaceId());
-            String executablePath = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getExecutablePath();
-            if (cloudJobSubmission.getProviderName().equals(ProviderName.EC2)) {
-                if (cloudJobSubmission.getExecutableType() != null) {
-                    command = cloudJobSubmission.getExecutableType() + " " + executablePath;
-                } else {
-                    command = "sh" + " " + executablePath;
-                }
-                command = setCmdParams(jobExecutionContext, command);
-
-            } else {
-                command = "sh" + " " + executablePath;
-                command = setCmdParams(jobExecutionContext, command);
-            }
-            return command + '\n';
-        } catch (AppCatalogException e) {
-            log.error("Error while retrieving cloud job submission", e);
-            throw new GFacProviderException("Error while retrieving cloud job submission", e);
-        }
-    }
-
-    private String setCmdParams(JobExecutionContext jobExecutionContext, String command) throws GFacProviderException {
-        List<String> inputParams = null;
-        try {
-            inputParams = ProviderUtils.getInputParameters(jobExecutionContext);
-        } catch (GFacProviderException e) {
-            throw new GFacProviderException("Error in extracting input values from JobExecutionContext");
-        }
-
-        for(String param : inputParams){
-            command = command + " " + param; // append; prepending a space each pass accumulated leading blanks
-        }
-
-        log.info("Command to be executed on EC2 : " + command);
-        return command;
-    }
-
-    /**
-     * Checks whether the port 22 of the Amazon instance is accessible.
-     *
-     * @param instance Amazon EC2 instance.
-     * @param ec2client AmazonEC2Client object
-     */
-    private void checkConnection(Instance instance, AmazonEC2Client ec2client) {
-        /* Make sure port 22 is reachable */
-        for (GroupIdentifier g : instance.getSecurityGroups()) {
-            IpPermission ip = new IpPermission();
-            ip.setIpProtocol("tcp");
-            ip.setFromPort(SSH_PORT);
-            ip.setToPort(SSH_PORT);
-            AuthorizeSecurityGroupIngressRequest r = new AuthorizeSecurityGroupIngressRequest();
-            r = r.withIpPermissions(ip.withIpRanges("0.0.0.0/0"));
-            r.setGroupId(g.getGroupId());
-            try {
-                ec2client.authorizeSecurityGroupIngress(r);
-            } catch (AmazonServiceException as) {
-                /* If the exception is for a duplicate rule, ignore it. */
-                if (!as.getErrorCode().equals("InvalidPermission.Duplicate"))
-                    throw as;
-            }
-        }
-    }
-
-    /**
-     * Initializes the Amazon EC2 environment needed to run the Cloud job submission. This will bring
-     * up an Amazon instance (out of an AMI) or reuse an existing instance id, storing the result in
-     * the instance field.
-     *
-     * @param jobExecutionContext Job execution context.
-     * @param ec2client EC2 Client.
-     * @throws GFacProviderException if the instance cannot be started or validated
-     */
-    private void initEc2Environment(JobExecutionContext jobExecutionContext, AmazonEC2Client ec2client)
-            throws GFacProviderException {
-        try {
-            /* Build key pair before start instance */
-            EC2ProviderUtil.buildKeyPair(ec2client, KEY_PAIR_NAME);
-
-            // right now, we can run it on one host
-            if (amazonSecurityContext.getAmiId() != null)
-                instance = AmazonEC2Util.startInstances(ec2client, amazonSecurityContext.getAmiId(),
-                        amazonSecurityContext.getInstanceType(), jobExecutionContext, KEY_PAIR_NAME).get(0);
-            else {
-
-                // already running instance
-                DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest();
-                DescribeInstancesResult describeInstancesResult =
-                        ec2client.describeInstances(describeInstancesRequest.
-                                withInstanceIds(amazonSecurityContext.getInstanceId()));
-
-                if (describeInstancesResult.getReservations().size() == 0 ||
-                        describeInstancesResult.getReservations().get(0).getInstances().size() == 0) {
-                    throw new GFacProviderException("Instance not found:" + amazonSecurityContext.getInstanceId());
-                }
-
-                instance = describeInstancesResult.getReservations().get(0).getInstances().get(0);
-
-                // check instance keypair
-                if (instance.getKeyName() == null || !instance.getKeyName().equals(KEY_PAIR_NAME)) {
-                    throw new GFacProviderException("Keypair for instance:" + amazonSecurityContext.getInstanceId() +
-                            " is not valid");
-                }
-            }
-
-            jobExecutionContext.getNotificationService().publish(new EC2ProviderEvent("EC2 Instance " +
-                    this.instance.getInstanceId() + " is running with public name " + this.instance.getPublicDnsName()));
-
-        } catch (Exception e) {
-            throw new GFacProviderException("Invalid Request",e);
-        }
-
-    }
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-        // do nothing
-    }
-
-}
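
For anyone mining this removed provider for the technique: the result extraction in
execute() boils down to scanning the shell's standard out for a "name=value" line. A
minimal, self-contained sketch of that pattern follows (plain JDK; the class and names
are illustrative, not part of the Airavata API):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.StringReader;

    public class StdoutResultScanner {
        /** Returns the value of the first "name=..." line, or null if none is seen. */
        static String extractResult(BufferedReader stdout, String outParamName) throws IOException {
            String line;
            while ((line = stdout.readLine()) != null) {
                if (line.startsWith(outParamName + "=")) {
                    // limit 2 keeps any '=' characters inside the value intact
                    return line.split("=", 2)[1].trim();
                }
            }
            return null;
        }

        public static void main(String[] args) throws IOException {
            BufferedReader fake = new BufferedReader(new StringReader("noise\nresult=42\n"));
            System.out.println(extractResult(fake, "result")); // prints 42
        }
    }

Reading line-by-line also sidesteps a quirk of the byte-buffer loop above, where a result
line can be split across two 255-byte reads and then missed by startsWith().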

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2ProviderEvent.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2ProviderEvent.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2ProviderEvent.java
deleted file mode 100644
index 42241c4..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2ProviderEvent.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import org.apache.airavata.gfac.core.notification.events.GFacEvent;
-
-public class EC2ProviderEvent extends GFacEvent {
-    String statusMessage;
-
-    public EC2ProviderEvent(String message){
-        this.eventType = EC2ProviderEvent.class.getSimpleName();
-        statusMessage = message;
-    }
-
-    public String getStatusMessage() {
-        return statusMessage;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/GreedyScheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/GreedyScheduler.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/GreedyScheduler.java
deleted file mode 100644
index 485724e..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/GreedyScheduler.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.Instance;
-
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class GreedyScheduler implements SchedulingAlgorithm {
-
-    /**
-     * Returns the id of the running Amazon instance with the lowest CPU utilization
-     * (out of the already running instances). If even that minimum exceeds 80%, the
-     * ami-id will be returned instead of an instance id, signalling that a new
-     * instance should be created. If a particular running instance's uptime is
-     * greater than 55 minutes, that instance will be shut down.
-     *
-     * @return instance id (or ami-id when a new instance is needed)
-     * @throws java.security.NoSuchAlgorithmException
-     * @throws java.security.spec.InvalidKeySpecException
-     * @throws java.io.IOException
-     */
-    public String getScheduledAmazonInstance(AmazonEC2Client ec2client, String imageId, AWSCredentials credential)
-            throws NoSuchAlgorithmException, InvalidKeySpecException, IOException {
-
-        Map<String, Double> instanceUtilMap = new HashMap<String, Double>();
-        List<Instance> instanceList = AmazonInstanceScheduler.loadInstances(ec2client);
-        // If there are no instances created at this point return the imageId
-        if(instanceList.isEmpty()){
-            return imageId;
-        }
-
-        for (Instance instance : instanceList) {
-            String instanceImageId = instance.getImageId();
-            String instanceId = instance.getInstanceId();
-            double avgCPUUtilization = AmazonInstanceScheduler.monitorInstance(credential, instanceId);
-
-            System.out.println("Image id         : " + instanceImageId);
-            System.out.println("Instance id      : " + instanceId);
-            System.out.println("CPU Utilization  : " + avgCPUUtilization);
-
-            //Storing the instance id, if that particular instance was created by the given AMI(imageId)
-            if(imageId.equalsIgnoreCase(instanceImageId)) {
-                instanceUtilMap.put(instanceId, avgCPUUtilization);
-            }
-        }
-
-        // Selects the instance with minimum CPU utilization
-        Map.Entry<String, Double> min = null;
-        for (Map.Entry<String, Double> entry : instanceUtilMap.entrySet()) {
-            if (min == null || min.getValue() > entry.getValue()) {
-                min = entry;
-            }
-        }
-
-        if((min!=null) && (min.getValue()<80)) {
-            System.out.println("Use the existing instance " + min.getKey() + " with CPU Utilization : " + min.getValue());
-            return min.getKey();
-        } else {
-            System.out.println("Create a new instance using AMI : " + imageId);
-            return imageId;
-        }
-    }
-
-}
-
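
The selection step above is an argmin over a utilization map with a threshold fallback;
the same core, stripped of the AWS calls (illustrative names, no SDK dependency):

    import java.util.HashMap;
    import java.util.Map;

    public class MinUtilPicker {
        /** Returns the key with the smallest value, or fallbackId when the map is
         *  empty or even the minimum is at or above the threshold. */
        static String pick(Map<String, Double> utilById, String fallbackId, double threshold) {
            Map.Entry<String, Double> min = null;
            for (Map.Entry<String, Double> e : utilById.entrySet()) {
                if (min == null || e.getValue() < min.getValue()) {
                    min = e;
                }
            }
            return (min != null && min.getValue() < threshold) ? min.getKey() : fallbackId;
        }

        public static void main(String[] args) {
            Map<String, Double> util = new HashMap<String, Double>();
            util.put("i-0001", 91.0);
            util.put("i-0002", 17.5);
            System.out.println(pick(util, "ami-1234", 80.0)); // prints i-0002
        }
    }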

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/SchedulingAlgorithm.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/SchedulingAlgorithm.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/SchedulingAlgorithm.java
deleted file mode 100644
index 1fb77fd..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/SchedulingAlgorithm.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
-
-public interface SchedulingAlgorithm {
-
-    String getScheduledAmazonInstance(AmazonEC2Client ec2client, String imageId, AWSCredentials credential)
-            throws NoSuchAlgorithmException, InvalidKeySpecException, IOException;
-}
-

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/AmazonEC2Util.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/AmazonEC2Util.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/AmazonEC2Util.java
deleted file mode 100644
index 81b4380..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/AmazonEC2Util.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2.util;
-
-import com.amazonaws.AmazonClientException;
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.*;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.ec2.EC2ProviderEvent;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/* This class holds the commonly used methods to communicate with the Amazon EC2 environment. */
-public class AmazonEC2Util {
-
-    public static final int SLEEP_TIME_SECOND = 120;
-
-    /**
-     * Starts an Amazon instance with the given information.
-     *
-     * @param ec2 Amazon ec2 client
-     * @param amiId Amazon Machine Image (AMI) id
-     * @param insType Instance type
-     * @param jobExecutionContext Job Execution context
-     * @param keyPairName Key pair name
-     * @return list of instances
-     * @throws AmazonServiceException AmazonServiceException
-     */
-    public static List<Instance> startInstances(AmazonEC2Client ec2, String amiId, String insType,
-                                          JobExecutionContext jobExecutionContext, String keyPairName)
-            throws AmazonServiceException {
-        // start only 1 instance
-        RunInstancesRequest request = new RunInstancesRequest(amiId, 1, 1);
-        request.setKeyName(keyPairName);
-        request.setInstanceType(insType);
-
-        RunInstancesResult result = ec2.runInstances(request);
-
-        List<Instance> instances = result.getReservation().getInstances();
-
-        while (!allInstancesStateEqual(instances, InstanceStateName.Running)) {
-
-            // instance status should not be Terminated
-            if (anyInstancesStateEqual(instances, InstanceStateName.Terminated)) {
-                throw new AmazonClientException("Some Instance is terminated before running a job");
-            }
-
-            // notify the status
-            for (Instance ins: instances) {
-                jobExecutionContext.getNotificationService().publish(new EC2ProviderEvent("EC2 Instance " +
-                        ins.getInstanceId() + " is " + ins.getState().getName()));
-            }
-
-            try {
-                Thread.sleep(SLEEP_TIME_SECOND * 1000L);
-            } catch (Exception ex) {
-                // no op
-            }
-
-            DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest();
-            describeInstancesRequest.setInstanceIds(getInstanceIDs(instances));
-
-            DescribeInstancesResult describeInstancesResult = ec2.describeInstances(describeInstancesRequest);
-            instances = describeInstancesResult.getReservations().get(0).getInstances();
-        }
-
-        return instances;
-    }
-
-    public static boolean anyInstancesStateEqual(List<Instance> instances, InstanceStateName name) {
-        for (Instance instance : instances) {
-            // if any instance is in the given state, return true
-            if (InstanceStateName.fromValue(instance.getState().getName()) == name) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    public static boolean allInstancesStateEqual(List<Instance> instances, InstanceStateName name) {
-        for (Instance instance : instances) {
-            // if any instance is not in the given state, return false
-            if (InstanceStateName.fromValue(instance.getState().getName()) != name) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    public static List<String> getInstanceIDs(List<Instance> instances) {
-        List<String> ret = new ArrayList<String>();
-        for (Instance instance : instances) {
-            ret.add(instance.getInstanceId());
-        }
-        return ret;
-    }
-}
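
startInstances() above is a classic poll-until-state loop: run the instances, then
repeatedly describe and sleep until all of them report the target state, bailing out
early if any is Terminated. The skeleton of that loop, with a plain Supplier standing
in for the describe call (a sketch, not the AWS SDK; it also adds an attempt cap,
which the removed code did not have):

    import java.util.function.Supplier;

    public class PollUntil {
        /** Polls a status probe until it reports the desired state or attempts run out. */
        static boolean waitForState(Supplier<String> probe, String desired,
                                    int maxAttempts, long sleepMillis) throws InterruptedException {
            for (int i = 0; i < maxAttempts; i++) {
                if (desired.equals(probe.get())) {
                    return true;
                }
                Thread.sleep(sleepMillis);
            }
            return false;
        }

        public static void main(String[] args) throws InterruptedException {
            final long start = System.currentTimeMillis();
            // fake probe: "pending" for the first second, then "running"
            Supplier<String> probe = new Supplier<String>() {
                public String get() {
                    return System.currentTimeMillis() - start < 1000 ? "pending" : "running";
                }
            };
            System.out.println(waitForState(probe, "running", 20, 200)); // prints true
        }
    }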

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/EC2ProviderUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/EC2ProviderUtil.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/EC2ProviderUtil.java
deleted file mode 100644
index 4d7fab7..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/EC2ProviderUtil.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2.util;
-
-import com.amazonaws.AmazonClientException;
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.DeleteKeyPairRequest;
-import com.amazonaws.services.ec2.model.DescribeKeyPairsRequest;
-import com.amazonaws.services.ec2.model.ImportKeyPairRequest;
-import com.sshtools.j2ssh.util.Base64;
-
-import java.io.*;
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
-
-import org.bouncycastle.openssl.PEMWriter;
-
-/* This class holds the utility methods used by the EC2Provider. */
-public class EC2ProviderUtil {
-
-    /**
-     * Builds a key pair with the given AmazonEC2Client and the generated key will have
-     * the name keyPairName.
-     *
-     * @param ec2 ec2client
-     * @param keyPairName name for the generated key pair
-     * @throws NoSuchAlgorithmException NoSuchAlgorithmException
-     * @throws InvalidKeySpecException InvalidKeySpecException
-     * @throws AmazonServiceException AmazonServiceException
-     * @throws AmazonClientException AmazonClientException
-     * @throws IOException IOException
-     */
-    public static void buildKeyPair(AmazonEC2Client ec2, String keyPairName)
-            throws NoSuchAlgorithmException, InvalidKeySpecException,
-            AmazonServiceException, AmazonClientException, IOException {
-        boolean newKey = false;
-
-        String privateKeyFilePath = System.getProperty("user.home") + "/.ssh/" + keyPairName;
-        File privateKeyFile = new File(privateKeyFilePath);
-        File publicKeyFile = new File(privateKeyFilePath + ".pub");
-
-        /* Check if Key-pair already created on the server */
-        if (!privateKeyFile.exists()) {
-
-            // check folder and create if it does not exist
-            File sshDir = new File(System.getProperty("user.home") + "/.ssh/");
-            if (!sshDir.exists())
-                sshDir.mkdir();
-
-            // Generate a 1024-bit RSA key pair
-            KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA");
-            keyGen.initialize(1024);
-            KeyPair keypair = keyGen.genKeyPair();
-
-            FileOutputStream fos = null;
-
-            // Store Public Key.
-            try {
-                fos = new FileOutputStream(privateKeyFilePath + ".pub");
-                fos.write(Base64.encodeBytes(keypair.getPublic().getEncoded(), true).getBytes());
-            } catch (IOException ioe) {
-                throw ioe;
-            } finally {
-                if (fos != null) {
-                    try {
-                        fos.close();
-                        fos = null;
-                    } catch (IOException ioe) {
-                        throw ioe;
-                    }
-                }
-            }
-
-            // Store Private Key.
-            try {
-                fos = new FileOutputStream(privateKeyFilePath);
-                StringWriter stringWriter = new StringWriter();
-
-                /* Write in PEM format (openssl support) */
-                PEMWriter pemFormatWriter = new PEMWriter(stringWriter);
-                pemFormatWriter.writeObject(keypair.getPrivate());
-                pemFormatWriter.close();
-                fos.write(stringWriter.toString().getBytes());
-            } catch (IOException ioe) {
-                throw ioe;
-            } finally {
-                if (fos != null) {
-                    try {
-                        fos.close();
-                        fos = null;
-                    } catch (IOException ioe) {
-                        throw ioe;
-                    }
-                }
-            }
-
-            privateKeyFile.setWritable(false, false);
-            privateKeyFile.setExecutable(false, false);
-            privateKeyFile.setReadable(false, false);
-            privateKeyFile.setReadable(true);
-            privateKeyFile.setWritable(true);
-
-            // set that this key is just created
-            newKey = true;
-        }
-
-        /* Read Public Key */
-        String encodedPublicKey = null;
-        BufferedReader br = null;
-        try {
-            br = new BufferedReader(new FileReader(publicKeyFile));
-            encodedPublicKey = br.readLine();
-        } catch (IOException ioe) {
-            throw ioe;
-        } finally {
-            if (br != null) {
-                try {
-                    br.close();
-                    br = null;
-                } catch (IOException ioe) {
-                    throw ioe;
-                }
-            }
-        }
-
-        /* Generate key pair in Amazon if necessary */
-        try {
-            /* Get current key pair in Amazon */
-            DescribeKeyPairsRequest describeKeyPairsRequest = new DescribeKeyPairsRequest();
-            ec2.describeKeyPairs(describeKeyPairsRequest.withKeyNames(keyPairName));
-
-            /* If key exists and new key is created, delete old key and replace
-             * with new one. Else, do nothing */
-            if (newKey) {
-                DeleteKeyPairRequest deleteKeyPairRequest = new DeleteKeyPairRequest(keyPairName);
-                ec2.deleteKeyPair(deleteKeyPairRequest);
-                ImportKeyPairRequest importKeyPairRequest = new ImportKeyPairRequest(keyPairName, encodedPublicKey);
-                ec2.importKeyPair(importKeyPairRequest);
-            }
-
-        } catch (AmazonServiceException ase) {
-            /* Key doesn't exist; import the new key. */
-            if (ase.getErrorCode().equals("InvalidKeyPair.NotFound")) {
-                ImportKeyPairRequest importKeyPairRequest = new ImportKeyPairRequest(keyPairName, encodedPublicKey);
-                ec2.importKeyPair(importKeyPairRequest);
-            } else {
-                throw ase;
-            }
-        }
-    }
-}
\ No newline at end of file
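
buildKeyPair() above mixes JDK key generation, the j2ssh Base64 encoder, and Bouncy
Castle's PEMWriter. The generate-and-encode half needs nothing beyond the JDK; a sketch
(note that EC2's ImportKeyPair expects OpenSSH or PEM public-key material, so the raw
X.509 base64 below is a starting point, not the wire format):

    import java.security.KeyPair;
    import java.security.KeyPairGenerator;
    import java.security.NoSuchAlgorithmException;
    import java.util.Base64;

    public class KeyPairSketch {
        public static void main(String[] args) throws NoSuchAlgorithmException {
            KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA");
            keyGen.initialize(2048); // the removed code used 1024 bits, weak by current standards
            KeyPair keypair = keyGen.genKeyPair();

            // X.509/SubjectPublicKeyInfo bytes, base64-encoded with java.util.Base64
            String encodedPublic = Base64.getEncoder().encodeToString(keypair.getPublic().getEncoded());
            System.out.println(encodedPublic.substring(0, 40) + "...");
        }
    }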

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/resources/errors.properties b/modules/gfac/gfac-ec2/src/main/resources/errors.properties
deleted file mode 100644
index 88c41b8..0000000
--- a/modules/gfac/gfac-ec2/src/main/resources/errors.properties
+++ /dev/null
@@ -1,197 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# Directly copied from jglobus. Not a good way to manage error properties.
-1 = Parameter not supported
-2 = The RSL length is greater than the maximum allowed
-3 = No resources available
-4 = Bad directory specified
-5 = The executable does not exist
-6 = Insufficient funds
-7 = Authentication with the remote server failed
-8 = Job cancelled by user
-9 = Job cancelled by system
-
-10 = Data transfer to the server failed
-11 = The stdin file does not exist
-12 = The connection to the server failed (check host and port)
-13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
-14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
-15 = The job manager received an invalid RSL
-16 = Could not connect to job manager
-17 = The job failed when the job manager attempted to run it
-18 = Paradyn error
-19 = The provided RSL 'jobtype' value is invalid
-
-20 = The provided RSL 'myjob' value is invalid
-21 = The job manager failed to locate an internal script argument file
-22 = The job manager failed to create an internal script argument file
-23 = The job manager detected an invalid job state
-24 = The job manager detected an invalid script response
-25 = The job manager detected an invalid job state
-26 = The provided RSL 'jobtype' value is not supported by this job manager
-27 = Unimplemented
-28 = The job manager failed to create an internal script submission file
-29 = The job manager cannot find the user proxy
-
-30 = The job manager failed to open the user proxy
-31 = The job manager failed to cancel the job as requested
-32 = System memory allocation failed
-33 = The interprocess job communication initialization failed
-34 = The interprocess job communication setup failed
-35 = The provided RSL 'host count' value is invalid
-36 = One of the provided RSL parameters is unsupported
-37 = The provided RSL 'queue' parameter is invalid
-38 = The provided RSL 'project' parameter is invalid
-39 = The provided RSL string includes variables that could not be identified
-
-40 = The provided RSL 'environment' parameter is invalid
-41 = The provided RSL 'dryrun' parameter is invalid
-42 = The provided RSL is invalid (an empty string)
-43 = The job manager failed to stage the executable
-44 = The job manager failed to stage the stdin file
-45 = The requested job manager type is invalid
-46 = The provided RSL 'arguments' parameter is invalid
-47 = The gatekeeper failed to run the job manager
-48 = The provided RSL could not be properly parsed
-49 = There is a version mismatch between GRAM components
-
-50 = The provided RSL 'arguments' parameter is invalid
-51 = The provided RSL 'count' parameter is invalid
-52 = The provided RSL 'directory' parameter is invalid
-53 = The provided RSL 'dryrun' parameter is invalid
-54 = The provided RSL 'environment' parameter is invalid
-55 = The provided RSL 'executable' parameter is invalid
-56 = The provided RSL 'host_count' parameter is invalid
-57 = The provided RSL 'jobtype' parameter is invalid
-58 = The provided RSL 'maxtime' parameter is invalid
-59 = The provided RSL 'myjob' parameter is invalid
-
-60 = The provided RSL 'paradyn' parameter is invalid
-61 = The provided RSL 'project' parameter is invalid
-62 = The provided RSL 'queue' parameter is invalid
-63 = The provided RSL 'stderr' parameter is invalid
-64 = The provided RSL 'stdin' parameter is invalid
-65 = The provided RSL 'stdout' parameter is invalid
-66 = The job manager failed to locate an internal script
-67 = The job manager failed on the system call pipe()
-68 = The job manager failed on the system call fcntl()
-69 = The job manager failed to create the temporary stdout filename
-
-70 = The job manager failed to create the temporary stderr filename
-71 = The job manager failed on the system call fork()
-72 = The executable file permissions do not allow execution
-73 = The job manager failed to open stdout
-74 = The job manager failed to open stderr
-75 = The cache file could not be opened in order to relocate the user proxy
-76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
-77 = The job manager failed to insert the contact in the client contact list
-78 = The contact was not found in the job manager's client contact list
-79 = Connecting to the job manager failed.  Possible reasons: job terminated, invalid job contact, network problems, ...
-
-80 = The syntax of the job contact is invalid
-81 = The executable parameter in the RSL is undefined
-82 = The job manager service is misconfigured.  condor arch undefined
-83 = The job manager service is misconfigured.  condor os undefined
-84 = The provided RSL 'min_memory' parameter is invalid
-85 = The provided RSL 'max_memory' parameter is invalid
-86 = The RSL 'min_memory' value is not zero or greater
-87 = The RSL 'max_memory' value is not zero or greater
-88 = The creation of a HTTP message failed
-89 = Parsing incoming HTTP message failed
-
-90 = The packing of information into a HTTP message failed
-91 = An incoming HTTP message did not contain the expected information
-92 = The job manager does not support the service that the client requested
-93 = The gatekeeper failed to find the requested service
-94 = The jobmanager does not accept any new requests (shutting down)
-95 = The client failed to close the listener associated with the callback URL
-96 = The gatekeeper contact cannot be parsed
-97 = The job manager could not find the 'poe' command
-98 = The job manager could not find the 'mpirun' command
-99 = The provided RSL 'start_time' parameter is invalid
-100 = The provided RSL 'reservation_handle' parameter is invalid
-
-101 = The provided RSL 'max_wall_time' parameter is invalid
-102 = The RSL 'max_wall_time' value is not zero or greater
-103 = The provided RSL 'max_cpu_time' parameter is invalid
-104 = The RSL 'max_cpu_time' value is not zero or greater
-105 = The job manager is misconfigured, a scheduler script is missing
-106 = The job manager is misconfigured, a scheduler script has invalid permissions
-107 = The job manager failed to signal the job
-108 = The job manager did not recognize/support the signal type
-109 = The job manager failed to get the job id from the local scheduler
-
-110 = The job manager is waiting for a commit signal
-111 = The job manager timed out while waiting for a commit signal
-112 = The provided RSL 'save_state' parameter is invalid
-113 = The provided RSL 'restart' parameter is invalid
-114 = The provided RSL 'two_phase' parameter is invalid
-115 = The RSL 'two_phase' value is not zero or greater
-116 = The provided RSL 'stdout_position' parameter is invalid
-117 = The RSL 'stdout_position' value is not zero or greater
-118 = The provided RSL 'stderr_position' parameter is invalid
-119 = The RSL 'stderr_position' value is not zero or greater
-
-120 = The job manager restart attempt failed
-121 = The job state file doesn't exist
-122 = Could not read the job state file
-123 = Could not write the job state file
-124 = The old job manager is still alive
-125 = The job manager state file TTL expired
-126 = It is unknown if the job was submitted
-127 = The provided RSL 'remote_io_url' parameter is invalid
-128 = Could not write the remote io url file
-129 = The standard output/error size is different
-
-130 = The job manager was sent a stop signal (job is still running)
-131 = The user proxy expired (job is still running)
-132 = The job was not submitted by original jobmanager
-133 = The job manager is not waiting for that commit signal
-134 = The provided RSL scheduler specific parameter is invalid
-135 = The job manager could not stage in a file
-136 = The scratch directory could not be created
-137 = The provided 'gass_cache' parameter is invalid
-138 = The RSL contains attributes which are not valid for job submission
-139 = The RSL contains attributes which are not valid for stdio update
-
-140 = The RSL contains attributes which are not valid for job restart
-141 = The provided RSL 'file_stage_in' parameter is invalid
-142 = The provided RSL 'file_stage_in_shared' parameter is invalid
-143 = The provided RSL 'file_stage_out' parameter is invalid
-144 = The provided RSL 'gass_cache' parameter is invalid
-145 = The provided RSL 'file_cleanup' parameter is invalid
-146 = The provided RSL 'scratch_dir' parameter is invalid
-147 = The provided scheduler-specific RSL parameter is invalid
-148 = A required RSL attribute was not defined in the RSL spec
-149 = The gass_cache attribute points to an invalid cache directory
-
-150 = The provided RSL 'save_state' parameter has an invalid value
-151 = The job manager could not open the RSL attribute validation file
-152 = The job manager could not read the RSL attribute validation file
-153 = The provided RSL 'proxy_timeout' is invalid
-154 = The RSL 'proxy_timeout' value is not greater than zero
-155 = The job manager could not stage out a file
-156 = The job contact string does not match any which the job manager is handling
-157 = Proxy delegation failed
-158 = The job manager could not lock the state lock file
-
-1000 = Failed to start up callback handler
-1003 = Job contact not set
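
The file above is a flat code-to-message table, so resolving a GRAM error code is a
plain java.util.Properties lookup; a sketch (the classpath resource name is assumed):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public class GramErrorLookup {
        /** Resolves a numeric GRAM error code to its message, with a default for unknown codes. */
        static String message(int code) throws IOException {
            Properties errors = new Properties();
            try (InputStream in = GramErrorLookup.class.getResourceAsStream("/errors.properties")) {
                if (in != null) {
                    errors.load(in);
                }
            }
            return errors.getProperty(String.valueOf(code), "Unknown GRAM error " + code);
        }

        public static void main(String[] args) throws IOException {
            // with the file on the classpath, code 12 maps to the connection-failure message
            System.out.println(message(12));
        }
    }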

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/resources/service.properties b/modules/gfac/gfac-ec2/src/main/resources/service.properties
deleted file mode 100644
index 8275a10..0000000
--- a/modules/gfac/gfac-ec2/src/main/resources/service.properties
+++ /dev/null
@@ -1,57 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-#
-# Class which implemented Scheduler interface. It will be used to determine a Provider
-#
-scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
-
-#
-# Data Service Plugins classes
-#
-datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
-
-#
-# Pre execution Plugins classes. For example, GridFTP Input Staging
-#
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging 
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
-
-#
-# Post execution Plugins classes. For example, GridFTP Output Staging
-#
-postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
-postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
-
-#
-# SSH private key location. It will be used by SSHProvider
-#
-# ssh.key=/home/user/.ssh/id_rsa
-# ssh.keypass=
-# ssh.username=usernameAtHost
-
-#
-# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
-#
-# myproxy.server=myproxy.teragrid.org
-# myproxy.user=username
-# myproxy.pass=password
-# myproxy.life=3600
\ No newline at end of file