Posted to commits@airavata.apache.org by sh...@apache.org on 2015/05/08 17:55:19 UTC

[4/4] airavata git commit: removed gfac-ec2, gfac-gram and gfac-hadoop modules from source.

removed gfac-ec2, gfac-gram and gfac-hadoop modules from source.


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/70239916
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/70239916
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/70239916

Branch: refs/heads/master
Commit: 70239916903505c8ec195108eed708f804d720b7
Parents: 742edee
Author: shamrath <sh...@gmail.com>
Authored: Fri May 8 11:54:58 2015 -0400
Committer: shamrath <sh...@gmail.com>
Committed: Fri May 8 11:54:58 2015 -0400

----------------------------------------------------------------------
 modules/gfac/gfac-ec2/pom.xml                   | 135 -----
 .../gfac/ec2/AmazonInstanceScheduler.java       | 233 --------
 .../gfac/ec2/AmazonSecurityContext.java         |  80 ---
 .../apache/airavata/gfac/ec2/AmazonUtil.java    | 142 -----
 .../apache/airavata/gfac/ec2/EC2Provider.java   | 365 ------------
 .../airavata/gfac/ec2/EC2ProviderEvent.java     |  37 --
 .../airavata/gfac/ec2/GreedyScheduler.java      |  92 ---
 .../airavata/gfac/ec2/SchedulingAlgorithm.java  |  36 --
 .../airavata/gfac/ec2/util/AmazonEC2Util.java   | 118 ----
 .../airavata/gfac/ec2/util/EC2ProviderUtil.java | 173 ------
 .../src/main/resources/errors.properties        | 197 -------
 .../src/main/resources/service.properties       |  57 --
 .../airavata/gfac/ec2/EC2ProviderTest.java      | 195 -------
 .../gfac/gfac-ec2/src/test/resources/echo.bat   |  22 -
 .../src/test/resources/logging.properties       |  42 --
 .../src/test/resources/service.properties       |  67 ---
 modules/gfac/gfac-gram/pom.xml                  | 124 -----
 .../airavata/gfac/gram/external/GridFtp.java    | 558 -------------------
 .../gram/handler/GramDirectorySetupHandler.java | 139 -----
 .../gfac/gram/handler/GridFTPInputHandler.java  | 203 -------
 .../gfac/gram/handler/GridFTPOutputHandler.java | 343 ------------
 .../persistence/DBJobPersistenceManager.java    | 225 --------
 .../gfac/gram/provider/impl/GramProvider.java   | 539 ------------------
 .../gfac/gram/security/GSISecurityContext.java  | 275 ---------
 .../gram/util/GramJobSubmissionListener.java    | 141 -----
 .../gfac/gram/util/GramProviderUtils.java       | 113 ----
 .../gfac/gram/util/GramRSLGenerator.java        | 211 -------
 .../gfac/gram/util/GridFTPContactInfo.java      |  61 --
 .../src/main/resources/errors.properties        | 197 -------
 .../src/main/resources/service.properties       |  58 --
 .../impl/GFacBaseTestWithMyProxyAuth.java       | 115 ----
 .../impl/GramProviderTestWithMyProxyAuth.java   | 225 --------
 .../src/test/resources/PBSTemplate.xslt         |  73 ---
 .../src/test/resources/logging.properties       |  42 --
 modules/gfac/gfac-hadoop/pom.xml                | 116 ----
 .../hadoop/handler/HDFSDataMovementHandler.java | 103 ----
 .../hadoop/handler/HadoopDeploymentHandler.java | 276 ---------
 .../hadoop/provider/impl/HadoopProvider.java    | 154 -----
 .../gfac/hadoop/provider/utils/HadoopUtils.java |  60 --
 .../src/main/resources/errors.properties        | 197 -------
 .../src/main/resources/service.properties       |  58 --
 .../src/test/resources/PBSTemplate.xslt         |  73 ---
 .../src/test/resources/logging.properties       |  42 --
 modules/gfac/pom.xml                            |   3 -
 modules/workflow-model/workflow-engine/pom.xml  |   4 +-
 modules/xbaya-gui/pom.xml                       |   4 +-
 .../airavata/xbaya/XBayaConfiguration.java      |  18 -
 .../dialogs/amazon/ChangeCredentialWindow.java  |  19 +-
 48 files changed, 11 insertions(+), 6749 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/pom.xml b/modules/gfac/gfac-ec2/pom.xml
deleted file mode 100644
index 4568a11..0000000
--- a/modules/gfac/gfac-ec2/pom.xml
+++ /dev/null
@@ -1,135 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor 
-	license agreements. See the NOTICE file distributed with this work for additional 
-	information regarding copyright ownership. The ASF licenses this file to 
-	you under the Apache License, Version 2.0 (the "License"); you may not use 
-	this file except in compliance with the License. You may obtain a copy of 
-	the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
-	by applicable law or agreed to in writing, software distributed under the 
-	License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
-	OF ANY KIND, either express or implied. See the License for the specific 
-	language governing permissions and limitations under the License. -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<parent>
-		<groupId>org.apache.airavata</groupId>
-		<artifactId>gfac</artifactId>
-		<version>0.15-SNAPSHOT</version>
-		<relativePath>../pom.xml</relativePath>
-	</parent>
-
-	<modelVersion>4.0.0</modelVersion>
-	<artifactId>airavata-gfac-ec2</artifactId>
-	<name>Airavata GFac EC2 Implementation</name>
-	<description>The core GFAC EC2 implementation using the framework features</description>
-	<url>http://airavata.apache.org/</url>
-
-	<dependencies>
-		<dependency>
-			<groupId>org.apache.airavata</groupId>
-			<artifactId>airavata-gfac-core</artifactId>
-			<version>${project.version}</version>
-		</dependency>
-
-		<dependency>
-			<groupId>commons-configuration</groupId>
-			<artifactId>commons-configuration</artifactId>
-			<version>1.6</version>
-		</dependency>
-
-		<!-- Logging -->
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-api</artifactId>
-		</dependency>
-
-		<!-- GFAC schemas -->
-		<dependency>
-			<groupId>org.apache.airavata</groupId>
-			<artifactId>airavata-workflow-execution-context</artifactId>
-			<version>${project.version}</version>
-		</dependency>
-		<!-- Workflow Tracking -->
-		<!--<dependency>-->
-			<!--<groupId>org.apache.airavata</groupId>-->
-			<!--<artifactId>airavata-workflow-tracking</artifactId>-->
-			<!--<version>${project.version}</version>-->
-		<!--</dependency>-->
-
-		<!-- SSH -->
-		<dependency>
-			<groupId>net.schmizz</groupId>
-			<artifactId>sshj</artifactId>
-			<version>0.8.0</version>
-		</dependency>
-
-		<!-- Credential Store -->
-		<dependency>
-			<groupId>org.apache.airavata</groupId>
-			<artifactId>airavata-credential-store</artifactId>
-			<version>${project.version}</version>
-		</dependency>
-
-		<!-- Amazon EC2 Provider -->
-		<dependency>
-			<groupId>com.amazonaws</groupId>
-			<artifactId>aws-java-sdk</artifactId>
-			<version>1.3.20</version>
-		</dependency>
-		<dependency>
-			<groupId>sshtools</groupId>
-			<artifactId>j2ssh-core</artifactId>
-			<version>0.2.9</version>
-		</dependency>
-		<dependency>
-			<groupId>sshtools</groupId>
-			<artifactId>j2ssh-common</artifactId>
-			<version>0.2.9</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.httpcomponents</groupId>
-			<artifactId>httpclient</artifactId>
-			<version>4.3</version>
-			<type>jar</type>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.httpcomponents</groupId>
-			<artifactId>httpcore</artifactId>
-			<version>4.3</version>
-			<type>jar</type>
-		</dependency>
-
-		<!-- Test -->
-		<dependency>
-			<groupId>junit</groupId>
-			<artifactId>junit</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.testng</groupId>
-			<artifactId>testng</artifactId>
-			<version>6.1.1</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>jcl-over-slf4j</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-log4j12</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.airavata</groupId>
-			<artifactId>airavata-client-configuration</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.bouncycastle</groupId>
-			<artifactId>bcpkix-jdk15on</artifactId>
-		</dependency>
-	</dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonInstanceScheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonInstanceScheduler.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonInstanceScheduler.java
deleted file mode 100644
index 9dd13dc..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonInstanceScheduler.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.cloudwatch.AmazonCloudWatchClient;
-import com.amazonaws.services.cloudwatch.model.Datapoint;
-import com.amazonaws.services.cloudwatch.model.Dimension;
-import com.amazonaws.services.cloudwatch.model.GetMetricStatisticsRequest;
-import com.amazonaws.services.cloudwatch.model.GetMetricStatisticsResult;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.DescribeInstancesResult;
-import com.amazonaws.services.ec2.model.Instance;
-import com.amazonaws.services.ec2.model.Reservation;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
-public class AmazonInstanceScheduler {
-    private static final Logger log = LoggerFactory.getLogger(AmazonInstanceScheduler.class);
-
-    /* Maximum number of instances that the Scheduler will create*/
-    //private static final int MAX_INSTANCE_COUNT = 3;
-
-    /* Maximum number of minutes an instance should be kept alive*/
-    public static final int INSTANCE_UP_TIME_THRESHOLD = 60;
-
-    private static volatile AmazonInstanceScheduler scheduler = null;
-
-    private static String imageId = null;
-
-    private static AWSCredentials credential = null;
-
-    private static AmazonEC2Client ec2client = null;
-
-    /* The time interval(minutes) in which the instances will be checked whether they have timed-out*/
-    public static final long TERMINATE_THREAD_UPDATE_INTERVAL = 5;
-
-    public static AmazonInstanceScheduler getInstance(String imageId, String accessKey, String secretKey)
-            throws IOException, InvalidKeySpecException, NoSuchAlgorithmException {
-
-        if(scheduler == null) {
-            synchronized (AmazonInstanceScheduler.class) {
-                if(scheduler == null) {
-                    new Thread() {
-                        @Override
-                        public void run() {
-                            //noinspection InfiniteLoopStatement
-                            while(true) {
-                                try {
-                                    Thread.sleep(TERMINATE_THREAD_UPDATE_INTERVAL * 60 * 1000);
-                                } catch (InterruptedException e ) {
-                                    // do-nothing
-                                }
-
-                                try {
-                                    terminateTimedOutAmazonInstances();
-                                } catch (Throwable e) {
-                                    log.error(e.getMessage(), e);
-                                }
-                            }
-
-                        }
-
-                    }.start();
-
-                    scheduler = new AmazonInstanceScheduler();
-                }
-            }
-        }
-
-        AmazonInstanceScheduler.imageId = imageId;
-        AmazonInstanceScheduler.credential = new BasicAWSCredentials(accessKey, secretKey);
-        AmazonInstanceScheduler.ec2client = new AmazonEC2Client(credential);
-
-        return scheduler;
-    }
-
-
-    /**
-     * Returns the amazon instance id of the amazon instance which is having the minimum
-     * CPU utilization (out of the already running instances). If the instance which
-     * is having the minimum CPU utilization exceeds 80%, ami-id will be returned
-     * instead of an instance id. If a particular running instance's uptime is
-     * greater than 55 minutes, that instance will be shut down.
-     *
-     * @return instance id
-     * @throws NoSuchAlgorithmException
-     * @throws InvalidKeySpecException
-     * @throws IOException
-     */
-    public String getScheduledAmazonInstance()
-            throws NoSuchAlgorithmException, InvalidKeySpecException, IOException {
-
-        SchedulingAlgorithm greedyAlgo = new GreedyScheduler();
-        return greedyAlgo.getScheduledAmazonInstance(ec2client, imageId, credential);
-    }
-
-    /**
-     * Terminates the Amazon instances that are timed out. Timed out refers to the
-     * instances which have been running for more than the INSTANCE_UP_TIME_THRESHOLD.
-     */
-    private static void terminateTimedOutAmazonInstances(){
-        System.out.println("Checking for timed-out instances");
-        List<Instance> instanceList = loadInstances(ec2client);
-        for (Instance instance : instanceList) {
-            String instanceId = instance.getInstanceId();
-
-            long upTime = getInstanceUptime(instance);
-            // if the instance up time is greater than the threshold, terminate the instance
-            if (upTime > INSTANCE_UP_TIME_THRESHOLD) {
-                List<String> requestIds = new ArrayList<String>();
-                requestIds.add(instanceId);
-                // terminate instance
-                System.out.println("Terminating the instance " + instanceId +
-                        " as the up time threshold is exceeded");
-                AmazonUtil.terminateInstances(requestIds);
-            }
-        }
-
-    }
-
-    /**
-     * Calculates the instance up time in minutes.
-     *
-     * @param instance instance to be monitored.
-     * @return up time of the instance.
-     */
-    private static long getInstanceUptime(Instance instance) {
-        Date startTime = instance.getLaunchTime();
-        Date today = new Date();
-        long diff = (today.getTime() - startTime.getTime()) / (1000 * 60);
-        System.out.println("Instance launch time   : " + startTime);
-        System.out.println("Instance up time (mins): " + diff);
-        return diff;
-    }
-
-    /**
-     * Monitors the CPU Utilization using Amazon Cloud Watch. In order to monitor the instance, Cloud Watch Monitoring
-     * should be enabled for the running instance.
-     *
-     * @param credential EC2 credentials
-     * @param instanceId instance id
-     * @return average CPU utilization of the instance
-     */
-    public static double monitorInstance(AWSCredentials credential, String instanceId) {
-        try {
-            AmazonCloudWatchClient cw = new AmazonCloudWatchClient(credential) ;
-
-            long offsetInMilliseconds = 1000 * 60 * 60 * 24;
-            GetMetricStatisticsRequest request = new GetMetricStatisticsRequest()
-                    .withStartTime(new Date(new Date().getTime() - offsetInMilliseconds))
-                    .withNamespace("AWS/EC2")
-                    .withPeriod(60 * 60)
-                    .withDimensions(new Dimension().withName("InstanceId").withValue(instanceId))
-                    .withMetricName("CPUUtilization")
-                    .withStatistics("Average", "Maximum")
-                    .withEndTime(new Date());
-            GetMetricStatisticsResult getMetricStatisticsResult = cw.getMetricStatistics(request);
-
-            double avgCPUUtilization = 0;
-            List dataPoint = getMetricStatisticsResult.getDatapoints();
-            for (Object aDataPoint : dataPoint) {
-                Datapoint dp = (Datapoint) aDataPoint;
-                avgCPUUtilization = dp.getAverage();
-                log.info(instanceId + " instance's average CPU utilization : " + dp.getAverage());
-            }
-
-            return avgCPUUtilization;
-
-        } catch (AmazonServiceException ase) {
-            log.error("Caught an AmazonServiceException, which means the request was made  "
-                    + "to Amazon EC2, but was rejected with an error response for some reason.");
-            log.error("Error Message:    " + ase.getMessage());
-            log.error("HTTP Status Code: " + ase.getStatusCode());
-            log.error("AWS Error Code:   " + ase.getErrorCode());
-            log.error("Error Type:       " + ase.getErrorType());
-            log.error("Request ID:       " + ase.getRequestId());
-
-        }
-        return 0;
-    }
-
-    /**
-     * Load instances associated with the given ec2 client
-     *
-     * @param ec2client ec2 client
-     * @return list of instances
-     */
-    public static List<Instance> loadInstances(AmazonEC2Client ec2client) {
-        List<Instance> resultList = new ArrayList<Instance>();
-        DescribeInstancesResult describeInstancesResult = ec2client.describeInstances();
-        List<Reservation> reservations = describeInstancesResult.getReservations();
-        for (Reservation reservation : reservations) {
-            for (Instance instance : reservation.getInstances()) {
-                System.out.println("instance       : " + instance);
-                if ("running".equalsIgnoreCase(instance.getState().getName())) {
-                    resultList.add(instance);
-                }
-            }
-        }
-        return resultList;
-    }
-
-}
-

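For context on how the removed scheduler was driven: the sketch below is a minimal, illustrative Java snippet (not code from the repository) showing a caller obtaining the singleton and asking it for a scheduling decision. The AMI id and credentials are hypothetical placeholders. Note that getInstance() also starts the background terminator thread, so long-running callers did not have to manage instance cleanup themselves.

    import org.apache.airavata.gfac.ec2.AmazonInstanceScheduler;

    public class SchedulerUsageSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical values; real deployments would read these from configuration.
            String amiId = "ami-00000000";
            String accessKey = "ACCESS_KEY";
            String secretKey = "SECRET_KEY";

            // getInstance() lazily creates the singleton and starts the background
            // thread that terminates instances older than INSTANCE_UP_TIME_THRESHOLD.
            AmazonInstanceScheduler scheduler =
                    AmazonInstanceScheduler.getInstance(amiId, accessKey, secretKey);

            // Returns a running instance id when one created from amiId is below the
            // CPU-utilization threshold, otherwise returns amiId so a new instance is launched.
            String target = scheduler.getScheduledAmazonInstance();
            System.out.println("Scheduled target: " + target);
        }
    }
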
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonSecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonSecurityContext.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonSecurityContext.java
deleted file mode 100644
index 75e55ae..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonSecurityContext.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import org.apache.airavata.gfac.SecurityContext;
-
-public class AmazonSecurityContext implements SecurityContext {
-
-	public static final String AMAZON_SECURITY_CONTEXT = "amazon";
-    private String userName;
-    private String accessKey;
-    private String secretKey;
-    private String amiId;
-    private String instanceType;
-    private String instanceId;
-    private boolean isRunningInstance = false;
-
-    public AmazonSecurityContext(String userName, String accessKey, String secretKey, String amiId, String instanceType) {
-        this.userName = userName;
-        this.accessKey = accessKey;
-        this.secretKey = secretKey;
-        this.amiId = amiId;
-        this.instanceType = instanceType;
-    }
-
-    public AmazonSecurityContext(String userName, String accessKey, String secretKey, String instanceId) {
-        this.userName = userName;
-        this.accessKey = accessKey;
-        this.secretKey = secretKey;
-        this.instanceId = instanceId;
-        this.isRunningInstance = true;
-    }
-
-    public String getAccessKey() {
-        return accessKey;
-    }
-
-    public String getSecretKey() {
-        return secretKey;
-    }
-
-    public String getInstanceId() {
-        return instanceId;
-    }
-
-    public String getInstanceType() {
-        return instanceType;
-    }
-
-    public String getAmiId() {
-        return amiId;
-    }
-
-    public boolean isRunningInstance() {
-        return isRunningInstance;
-    }
-
-    public String getUserName() {
-        return userName;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonUtil.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonUtil.java
deleted file mode 100644
index 7814096..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/AmazonUtil.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.*;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-public class AmazonUtil {
-
-    /* Amazon EC2 instance type */
-    public final static String[] INSTANCE_TYPE =
-            { "t1.micro", "m1.small", "m1.large", "m1.xlarge", "m2.xlarge", "m2.2xlarge",
-                    "m2.4xlarge", "c1.medium", "c1.xlarge" };
-
-    private static AmazonEC2 getEC2Client() {
-        // FIXME : Fix this properly after adding UI components.
-        String accessKey = "";
-        String secretKey = "";
-        AmazonEC2 ec2 = new AmazonEC2Client(new BasicAWSCredentials(accessKey, secretKey));
-        return ec2;
-    }
-
-    /**
-     * Launch a new EC2 instance
-     *
-     * @param amiId
-     * @param type
-     * @param number
-     * @return list of newly launched instances
-     */
-    public static List<Instance> launchInstance(String amiId, String type, Integer number) {
-        List<Instance> resultList = new ArrayList<Instance>();
-
-        RunInstancesRequest request = new RunInstancesRequest(amiId, number, number);
-        request.setInstanceType(type);
-
-        RunInstancesResult result = getEC2Client().runInstances(request);
-        resultList.addAll(result.getReservation().getInstances());
-        return resultList;
-    }
-
-    /**
-     * Launch a new EC2 instance
-     *
-     * @param amiId
-     * @param type
-     * @param number
-     * @param keyname
-     * @return list of newly launched instances
-     */
-    public static List<Instance> launchInstance(String amiId, String type, Integer number, String keyname) {
-        List<Instance> resultList = new ArrayList<Instance>();
-
-        RunInstancesRequest request = new RunInstancesRequest(amiId, number, number);
-        request.setInstanceType(type);
-        request.setKeyName(keyname);
-
-        RunInstancesResult result = getEC2Client().runInstances(request);
-        resultList.addAll(result.getReservation().getInstances());
-        return resultList;
-    }
-
-    /**
-     * Load instances
-     *
-     * @return list of instances
-     */
-    public static List<Instance> loadInstances() {
-        List<Instance> resultList = new ArrayList<Instance>();
-        DescribeInstancesResult describeInstancesResult = getEC2Client().describeInstances();
-        List<Reservation> reservations = describeInstancesResult.getReservations();
-        for (Iterator<Reservation> iterator = reservations.iterator(); iterator.hasNext();) {
-            Reservation reservation = iterator.next();
-            for (Instance instance : reservation.getInstances()) {
-                resultList.add(instance);
-            }
-        }
-        return resultList;
-    }
-
-    /**
-     * Load key pairs
-     *
-     * @return list of keypairs
-     */
-    public static List<String> loadKeypairs(){
-        List<String> resultList = new ArrayList<String>();
-        DescribeKeyPairsResult results = getEC2Client().describeKeyPairs();
-        for (KeyPairInfo key : results.getKeyPairs()) {
-            resultList.add(key.getKeyName());
-        }
-        return resultList;
-    }
-
-    /**
-     * Terminate instances
-     *
-     * @param instanceIds instance ids of the running instances.
-     */
-    public static void terminateInstances(List<String> instanceIds) {
-        // terminate
-        TerminateInstancesRequest request = new TerminateInstancesRequest(instanceIds);
-        getEC2Client().terminateInstances(request);
-    }
-
-    /**
-     * Terminate instances
-     *
-     * @param instanceIds  instance ids of the running instances.
-     */
-    public static void terminateInstances(String... instanceIds) {
-        // terminate
-        TerminateInstancesRequest request = new TerminateInstancesRequest();
-        getEC2Client().terminateInstances(request.withInstanceIds(instanceIds));
-    }
-
-}

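AmazonUtil above is a thin static wrapper over AmazonEC2Client. A minimal usage sketch follows (illustrative only; the AMI id, instance type and key name are hypothetical, and getEC2Client() still carried hard-coded empty credentials per the FIXME, so the calls only worked once those were filled in):

    import java.util.List;

    import com.amazonaws.services.ec2.model.Instance;
    import org.apache.airavata.gfac.ec2.AmazonUtil;

    public class AmazonUtilSketch {
        public static void main(String[] args) {
            // Launch a single instance from a hypothetical AMI with a named key pair.
            List<Instance> launched =
                    AmazonUtil.launchInstance("ami-00000000", "t1.micro", 1, "ec2_rsa");
            String instanceId = launched.get(0).getInstanceId();

            // ... run the job against the instance ...

            // Clean up by instance id once the job has finished.
            AmazonUtil.terminateInstances(instanceId);
        }
    }
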
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
deleted file mode 100644
index 29efb73..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
+++ /dev/null
@@ -1,365 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.*;
-import com.sshtools.j2ssh.SshClient;
-import com.sshtools.j2ssh.authentication.AuthenticationProtocolState;
-import com.sshtools.j2ssh.authentication.PublicKeyAuthenticationClient;
-import com.sshtools.j2ssh.configuration.SshConnectionProperties;
-import com.sshtools.j2ssh.session.SessionChannelClient;
-import com.sshtools.j2ssh.transport.HostKeyVerification;
-import com.sshtools.j2ssh.transport.TransportProtocolException;
-import com.sshtools.j2ssh.transport.publickey.InvalidSshKeyException;
-import com.sshtools.j2ssh.transport.publickey.SshPrivateKey;
-import com.sshtools.j2ssh.transport.publickey.SshPrivateKeyFile;
-import com.sshtools.j2ssh.transport.publickey.SshPublicKey;
-import org.airavata.appcatalog.cpi.AppCatalogException;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.core.provider.utils.ProviderUtils;
-import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.gfac.ec2.util.AmazonEC2Util;
-import org.apache.airavata.gfac.ec2.util.EC2ProviderUtil;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.appcatalog.computeresource.CloudJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
-import org.apache.airavata.model.appcatalog.computeresource.ProviderName;
-import org.apache.airavata.model.workspace.experiment.JobState;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Calendar;
-import java.util.List;
-import java.util.Map;
-
-public class EC2Provider extends AbstractProvider {
-
-    private static final Logger log = LoggerFactory.getLogger(EC2Provider.class);
-
-    public static final int SOCKET_TIMEOUT = 30000;
-
-    public static final int SSH_PORT = 22;
-
-    public static final String KEY_PAIR_NAME = "ec2_rsa";
-
-    private Instance instance = null;
-
-    private AmazonSecurityContext amazonSecurityContext;
-    
-    private String jobId;
-    
-    private String taskID; 
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException{
-        if (jobExecutionContext != null) {
-    		jobId="EC2_"+jobExecutionContext.getHostName()+"_"+Calendar.getInstance().getTimeInMillis();
-            if (jobExecutionContext.getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT)
-                    instanceof AmazonSecurityContext) {
-                this.amazonSecurityContext = (AmazonSecurityContext) jobExecutionContext.
-                        getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT);
-            } else {
-                throw new GFacProviderException("Amazon Security Context is not set" + jobExecutionContext);
-            }
-        } else {
-            throw new GFacProviderException("Job Execution Context is null" + jobExecutionContext);
-        }
-
-        if (log.isDebugEnabled()) {
-            log.debug("ACCESS_KEY:" + amazonSecurityContext.getAccessKey());
-            log.debug("SECRET_KEY:" + amazonSecurityContext.getSecretKey());
-            log.debug("AMI_ID:" + amazonSecurityContext.getAmiId());
-            log.debug("INS_ID:" + amazonSecurityContext.getInstanceId());
-            log.debug("INS_TYPE:" + amazonSecurityContext.getInstanceType());
-            log.debug("USERNAME:" + amazonSecurityContext.getUserName());
-        }
-//        job
-        details.setJobID(jobId);
-        /* Validation */
-        if (amazonSecurityContext.getAccessKey() == null || amazonSecurityContext.getAccessKey().isEmpty())
-            throw new GFacProviderException("EC2 Access Key is empty");
-        if (amazonSecurityContext.getSecretKey() == null || amazonSecurityContext.getSecretKey().isEmpty())
-            throw new GFacProviderException("EC2 Secret Key is empty");
-        if ((amazonSecurityContext.getAmiId() == null && amazonSecurityContext.getInstanceId() == null) ||
-                (amazonSecurityContext.getAmiId() != null && amazonSecurityContext.getAmiId().isEmpty()) ||
-                (amazonSecurityContext.getInstanceId() != null && amazonSecurityContext.getInstanceId().isEmpty()))
-            throw new GFacProviderException("EC2 AMI or Instance ID is empty");
-        if (amazonSecurityContext.getUserName() == null || amazonSecurityContext.getUserName().isEmpty())
-            throw new GFacProviderException("EC2 Username is empty");
-
-        /* Need to start EC2 instance before running it */
-        AWSCredentials credential =
-                new BasicAWSCredentials(amazonSecurityContext.getAccessKey(), amazonSecurityContext.getSecretKey());
-        AmazonEC2Client ec2client = new AmazonEC2Client(credential);
-        taskID = jobExecutionContext.getTaskData().getTaskID();
-		GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SETUP);
-        initEc2Environment(jobExecutionContext, ec2client);
-        checkConnection(instance, ec2client);
-    }
-
-	
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-    
-        String shellCmd = createShellCmd(jobExecutionContext);
-//        AiravataAPI airavataAPI = jobExecutionContext.getGFacConfiguration().getAiravataAPI();
-//        if (airavataAPI!=null){
-//        	try {
-//				airavataAPI.getProvenanceManager().updateApplicationJobData(jobId, shellCmd);
-//			} catch (AiravataAPIInvocationException e) {
-//				log.error("Error in saving EC2 shell command!!!", e);
-//			}
-//        }
-        SshClient sshClient = new SshClient();
-        sshClient.setSocketTimeout(SOCKET_TIMEOUT);
-        SshConnectionProperties properties = new SshConnectionProperties();
-        properties.setHost(this.instance.getPublicDnsName());
-        properties.setPort(SSH_PORT);
-
-        // Connect to the host
-        try
-        {
-            String outParamName;
-            List<OutputDataObjectType> outputs = jobExecutionContext.getApplicationContext().getApplicationInterfaceDescription().getApplicationOutputs();
-            if(outputs != null && !outputs.isEmpty()) {
-                outParamName = outputs.get(0).getName();
-            } else {
-                throw new GFacProviderException("Output parameter name is not set. Therefore, unable " +
-                        "to filter the job result from standard out");
-            }
-
-            sshClient.connect(properties, new HostKeyVerification() {
-                public boolean verifyHost(String s, SshPublicKey sshPublicKey) throws TransportProtocolException {
-                    log.debug("Verifying Host: " + s);
-                    return true;
-                }
-            });
-            // Initialize the authentication data.
-            PublicKeyAuthenticationClient publicKeyAuth = new PublicKeyAuthenticationClient();
-            publicKeyAuth.setUsername(amazonSecurityContext.getUserName());
-            SshPrivateKeyFile file = SshPrivateKeyFile.
-                    parse(new File(System.getProperty("user.home") + "/.ssh/" + KEY_PAIR_NAME));
-            SshPrivateKey privateKey = file.toPrivateKey("");
-            publicKeyAuth.setKey(privateKey);
-
-            // Authenticate
-            int result = sshClient.authenticate(publicKeyAuth);
-            if(result== AuthenticationProtocolState.FAILED) {
-            	GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.FAILED);
-                throw new GFacProviderException("The authentication failed");
-            } else if(result==AuthenticationProtocolState.PARTIAL) {
-                throw new GFacProviderException("The authentication succeeded but another "
-                        + "authentication is required");
-            } else if(result==AuthenticationProtocolState.COMPLETE) {
-                log.info("ssh client authentication is complete...");
-            }
-            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SUBMITTED);
-            SessionChannelClient session = sshClient.openSessionChannel();
-            log.info("ssh session successfully opened...");
-            session.requestPseudoTerminal("vt100", 80, 25, 0, 0, "");
-            session.startShell();
-            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.ACTIVE);
-              
-            session.getOutputStream().write(shellCmd.getBytes());
-
-            InputStream in = session.getInputStream();
-            byte buffer[] = new byte[255];
-            int read;
-            String executionResult = "";
-            while((read = in.read(buffer)) > 0) {
-                String out = new String(buffer, 0, read);
-//                System.out.println(out);
-
-                if(out.startsWith(outParamName)) {
-                    executionResult = out.split("=")[1];
-                    log.debug("Result found in the StandardOut ");
-                    break;
-                }
-            }
-         
-            executionResult = executionResult.replace("\r","").replace("\n","");
-            log.info("Result of the job : " + executionResult);
-
-            for(OutputDataObjectType outparamType : outputs){
-                /* Assuming that there is just a single result. If you want to add more results, update the necessary
-                   logic below */
-                String paramName = outparamType.getName();
-                String value = outparamType.getValue();
-                jobExecutionContext.getOutMessageContext().addParameter(paramName, value);
-            }
-            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.COMPLETE);
-        } catch (InvalidSshKeyException e) {
-            throw new GFacProviderException("Invalid SSH key", e);
-        } catch (IOException e) {
-            throw new GFacProviderException("Error occurred during IO", e);
-        } catch (Exception e) {
-            throw new GFacProviderException("Error parsing standard out for job execution result", e);
-        }
-
-    }
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        // Do nothing
-    }
-
-    public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacException {
-        throw new NotImplementedException();
-    }
-
-    /**
-     * Creates the command to be executed in the remote shell.
-     *
-     * @param jobExecutionContext JobExecutionContext for the cloud job
-     * @return shell command to be executed
-     * @throws GFacProviderException GFacProviderException
-     */
-    private String createShellCmd(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        try {
-            String command = "";
-            JobSubmissionInterface submissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
-            CloudJobSubmission cloudJobSubmission = GFacUtils.getCloudJobSubmission(submissionInterface.getJobSubmissionInterfaceId());
-            String executablePath = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getExecutablePath();
-            if (cloudJobSubmission.getProviderName().equals(ProviderName.EC2)) {
-                if (cloudJobSubmission.getExecutableType() != null) {
-                    command = cloudJobSubmission.getExecutableType() + " " + executablePath;
-                } else {
-                    command = "sh" + " " + executablePath;
-                }
-                command = setCmdParams(jobExecutionContext, command);
-
-            } else {
-                command = "sh" + " " + executablePath;
-                command = setCmdParams(jobExecutionContext, command);
-            }
-            return command + '\n';
-        } catch (AppCatalogException e) {
-            log.error("Error while retrieving cloud job submission", e);
-            throw new GFacProviderException("Error while retrieving cloud job submission", e);
-        }
-    }
-
-    private String setCmdParams(JobExecutionContext jobExecutionContext, String command) throws GFacProviderException {
-        List<String> inputParams = null;
-        try {
-            inputParams = ProviderUtils.getInputParameters(jobExecutionContext);
-        } catch (GFacProviderException e) {
-            throw new GFacProviderException("Error in extracting input values from JobExecutionContext");
-        }
-
-        for(String param : inputParams){
-            command = " " + command + " " + param;
-        }
-
-        log.info("Command to be executed on EC2 : " + command);
-        return command;
-    }
-
-    /**
-     * Checks whether the port 22 of the Amazon instance is accessible.
-     *
-     * @param instance Amazon instance id.
-     * @param ec2client AmazonEC2Client object
-     */
-    private void checkConnection(Instance instance, AmazonEC2Client ec2client) {
-        /* Make sure port 22 is connectible */
-        for (GroupIdentifier g : instance.getSecurityGroups()) {
-            IpPermission ip = new IpPermission();
-            ip.setIpProtocol("tcp");
-            ip.setFromPort(SSH_PORT);
-            ip.setToPort(SSH_PORT);
-            AuthorizeSecurityGroupIngressRequest r = new AuthorizeSecurityGroupIngressRequest();
-            r = r.withIpPermissions(ip.withIpRanges("0.0.0.0/0"));
-            r.setGroupId(g.getGroupId());
-            try {
-                ec2client.authorizeSecurityGroupIngress(r);
-            } catch (AmazonServiceException as) {
-                /* If the exception is for a duplicate rule, ignore it. */
-                if (!as.getErrorCode().equals("InvalidPermission.Duplicate"))
-                    throw as;
-            }
-        }
-    }
-
-    /**
-     * Initializes the Amazon EC2 environment needed to run the Cloud job submission. This will bring
-     * up an Amazon instance (out of an AMI) or use an existing instance id.
-     *
-     * @param jobExecutionContext Job execution context.
-     * @param ec2client EC2 Client.
-     * @return instance id of the running Amazon instance.
-     * @throws GFacProviderException
-     */
-    private void initEc2Environment(JobExecutionContext jobExecutionContext, AmazonEC2Client ec2client)
-            throws GFacProviderException {
-        try {
-            /* Build key pair before start instance */
-            EC2ProviderUtil.buildKeyPair(ec2client, KEY_PAIR_NAME);
-
-            // right now, we can run it on one host
-            if (amazonSecurityContext.getAmiId() != null)
-                instance = AmazonEC2Util.startInstances(ec2client, amazonSecurityContext.getAmiId(),
-                        amazonSecurityContext.getInstanceType(), jobExecutionContext, KEY_PAIR_NAME).get(0);
-            else {
-
-                // already running instance
-                DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest();
-                DescribeInstancesResult describeInstancesResult =
-                        ec2client.describeInstances(describeInstancesRequest.
-                                withInstanceIds(amazonSecurityContext.getInstanceId()));
-
-                if (describeInstancesResult.getReservations().size() == 0 ||
-                        describeInstancesResult.getReservations().get(0).getInstances().size() == 0) {
-                    throw new GFacProviderException("Instance not found:" + amazonSecurityContext.getInstanceId());
-                }
-
-                instance = describeInstancesResult.getReservations().get(0).getInstances().get(0);
-
-                // check instance keypair
-                if (instance.getKeyName() == null || !instance.getKeyName().equals(KEY_PAIR_NAME)) {
-                    throw new GFacProviderException("Keypair for instance:" + amazonSecurityContext.getInstanceId() +
-                            " is not valid");
-                }
-            }
-
-            jobExecutionContext.getNotificationService().publish(new EC2ProviderEvent("EC2 Instance " +
-                    this.instance.getInstanceId() + " is running with public name " + this.instance.getPublicDnsName()));
-
-        } catch (Exception e) {
-            throw new GFacProviderException("Invalid Request",e);
-        }
-
-    }
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-        // do nothing
-    }
-
-}

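The execute() method above recovers the job result from the SSH session's standard output by looking for a line of the form <outputName>=<value>. Below is a self-contained sketch of just that parsing convention (hypothetical parameter name and output; not repository code):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class StdoutResultParsingSketch {

        // Mirrors the convention used in execute(): the job writes "<paramName>=<value>"
        // to standard out, and the provider keeps the text after the first '='.
        static String extractResult(InputStream in, String outParamName) throws IOException {
            byte[] buffer = new byte[255];
            int read;
            String executionResult = "";
            while ((read = in.read(buffer)) > 0) {
                String out = new String(buffer, 0, read);
                if (out.startsWith(outParamName)) {
                    executionResult = out.split("=")[1];
                    break;
                }
            }
            return executionResult.replace("\r", "").replace("\n", "");
        }

        public static void main(String[] args) throws IOException {
            // Hypothetical output parameter name and job output.
            InputStream fakeStdout =
                    new ByteArrayInputStream("Echoed_Output=Hello World\r\n".getBytes());
            System.out.println(extractResult(fakeStdout, "Echoed_Output")); // Hello World
        }
    }
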
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2ProviderEvent.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2ProviderEvent.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2ProviderEvent.java
deleted file mode 100644
index 42241c4..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2ProviderEvent.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import org.apache.airavata.gfac.core.notification.events.GFacEvent;
-
-public class EC2ProviderEvent extends GFacEvent {
-    String statusMessage;
-
-    public EC2ProviderEvent(String message){
-        this.eventType = EC2ProviderEvent.class.getSimpleName();
-        statusMessage = message;
-    }
-
-    public String getStatusMessage() {
-        return statusMessage;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/GreedyScheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/GreedyScheduler.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/GreedyScheduler.java
deleted file mode 100644
index 485724e..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/GreedyScheduler.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.Instance;
-
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class GreedyScheduler implements SchedulingAlgorithm {
-
-    /**
-     * Returns the amazon instance id of the amazon instance which is having the minimum
-     * CPU utilization (out of the already running instances). If the instance which
-     * is having the minimum CPU utilization exceeds 80%, ami-id will be returned
-     * instead of an instance id. If a particular running instance's uptime is
-     * greater than 55 minutes, that instance will be shut down.
-     *
-     * @return instance id
-     * @throws java.security.NoSuchAlgorithmException
-     * @throws java.security.spec.InvalidKeySpecException
-     * @throws java.io.IOException
-     */
-    public String getScheduledAmazonInstance(AmazonEC2Client ec2client, String imageId, AWSCredentials credential)
-            throws NoSuchAlgorithmException, InvalidKeySpecException, IOException {
-
-        Map<String, Double> instanceUtilMap = new HashMap<String, Double>();
-        List<Instance> instanceList = AmazonInstanceScheduler.loadInstances(ec2client);
-        // If there are no instances created at this point return the imageId
-        if(instanceList.isEmpty()){
-            return imageId;
-        }
-
-        for (Instance instance : instanceList) {
-            String instanceImageId = instance.getImageId();
-            String instanceId = instance.getInstanceId();
-            double avgCPUUtilization = AmazonInstanceScheduler.monitorInstance(credential, instanceId);
-
-            System.out.println("Image id         : " + instanceImageId);
-            System.out.println("Instance id      : " + instanceId);
-            System.out.println("CPU Utilization  : " + avgCPUUtilization);
-
-            //Storing the instance id, if that particular instance was created by the given AMI(imageId)
-            if(imageId.equalsIgnoreCase(instanceImageId)) {
-                instanceUtilMap.put(instanceId, avgCPUUtilization);
-            }
-        }
-
-        // Selects the instance with minimum CPU utilization
-        Map.Entry<String, Double> min = null;
-        for (Map.Entry<String, Double> entry : instanceUtilMap.entrySet()) {
-            if (min == null || min.getValue() > entry.getValue()) {
-                min = entry;
-            }
-        }
-
-        if((min!=null) && (min.getValue()<80)) {
-            System.out.println("Use the existing instance " + min.getKey() + " with CPU Utilization : " + min.getValue());
-            return min.getKey();
-        } else {
-            System.out.println("Create a new instance using AMI : " + imageId);
-            return imageId;
-        }
-    }
-
-}
-

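The selection step above reduces to picking the map entry with the lowest average CPU utilization and falling back to the AMI id (i.e. launching a new instance) when even the best candidate is at 80% or above. A small, self-contained sketch of that decision, with hypothetical instance ids and utilization figures:

    import java.util.HashMap;
    import java.util.Map;

    public class GreedySelectionSketch {

        // Pick the instance with the lowest average CPU utilization; return the AMI id
        // when no candidate is below the 80% threshold.
        static String pick(Map<String, Double> instanceUtilMap, String imageId) {
            Map.Entry<String, Double> min = null;
            for (Map.Entry<String, Double> entry : instanceUtilMap.entrySet()) {
                if (min == null || min.getValue() > entry.getValue()) {
                    min = entry;
                }
            }
            return (min != null && min.getValue() < 80) ? min.getKey() : imageId;
        }

        public static void main(String[] args) {
            // Hypothetical utilization figures.
            Map<String, Double> util = new HashMap<String, Double>();
            util.put("i-aaaa1111", 35.0);
            util.put("i-bbbb2222", 92.5);
            System.out.println(pick(util, "ami-00000000")); // i-aaaa1111
        }
    }
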
http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/SchedulingAlgorithm.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/SchedulingAlgorithm.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/SchedulingAlgorithm.java
deleted file mode 100644
index 1fb77fd..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/SchedulingAlgorithm.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
-
-public interface SchedulingAlgorithm {
-
-    String getScheduledAmazonInstance(AmazonEC2Client ec2client, String imageId, AWSCredentials credential)
-            throws NoSuchAlgorithmException, InvalidKeySpecException, IOException;
-}
-

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/AmazonEC2Util.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/AmazonEC2Util.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/AmazonEC2Util.java
deleted file mode 100644
index 81b4380..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/AmazonEC2Util.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2.util;
-
-import com.amazonaws.AmazonClientException;
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.*;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.ec2.EC2ProviderEvent;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/* This class holds the commonly used methods to communicate with Amazon EC2 environment*/
-public class AmazonEC2Util {
-
-    public static final int SLEEP_TIME_SECOND = 120;
-
-    /**
-     * Starts an Amazon instance with the given information.
-     *
-     * @param ec2 Amazon ec2 client
-     * @param amiId Amazon Machine Image (AMI) id
-     * @param insType Instance type
-     * @param jobExecutionContext Job Execution context
-     * @param keyPairName Key pair name
-     * @return list of instances
-     * @throws AmazonServiceException AmazonServiceException
-     */
-    public static List<Instance> startInstances(AmazonEC2Client ec2, String amiId, String insType,
-                                          JobExecutionContext jobExecutionContext, String keyPairName)
-            throws AmazonServiceException {
-        // start only 1 instance
-        RunInstancesRequest request = new RunInstancesRequest(amiId, 1, 1);
-        request.setKeyName(keyPairName);
-        request.setInstanceType(insType);
-
-        RunInstancesResult result = ec2.runInstances(request);
-
-        List<Instance> instances = result.getReservation().getInstances();
-
-        while (!allInstancesStateEqual(instances, InstanceStateName.Running)) {
-
-            // instance status should not be Terminated
-            if (anyInstancesStateEqual(instances, InstanceStateName.Terminated)) {
-                throw new AmazonClientException("An instance was terminated before the job could run");
-            }
-
-            // notify the status
-            for (Instance ins: instances) {
-                jobExecutionContext.getNotificationService().publish(new EC2ProviderEvent("EC2 Instance " +
-                        ins.getInstanceId() + " is " + ins.getState().getName()));
-            }
-
-            try {
-                Thread.sleep(SLEEP_TIME_SECOND * 1000L);
-            } catch (Exception ex) {
-                // no op
-            }
-
-            DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest();
-            describeInstancesRequest.setInstanceIds(getInstanceIDs(instances));
-
-            DescribeInstancesResult describeInstancesResult = ec2.describeInstances(describeInstancesRequest);
-            instances = describeInstancesResult.getReservations().get(0).getInstances();
-        }
-
-        return instances;
-    }
-
-    public static boolean anyInstancesStateEqual(List<Instance> instances, InstanceStateName name) {
-        for (Instance instance : instances) {
-            // return true as soon as any instance is in the given state
-            if (InstanceStateName.fromValue(instance.getState().getName()) == name) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    public static boolean allInstancesStateEqual(List<Instance> instances, InstanceStateName name) {
-        for (Instance instance : instances) {
-            // if any instance is not in the given state, return false
-            if (InstanceStateName.fromValue(instance.getState().getName()) != name) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    public static List<String> getInstanceIDs(List<Instance> instances) {
-        List<String> ret = new ArrayList<String>();
-        for (Instance instance : instances) {
-            ret.add(instance.getInstanceId());
-        }
-        return ret;
-    }
-}
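As a usage sketch, the helper above might have been driven roughly as follows. Only the startInstances/getInstanceIDs signatures come from the deleted class; the credentials, AMI id, instance type and key-pair name are placeholders, and jobExecutionContext stands for whatever JobExecutionContext the calling provider already holds.

    // Hypothetical caller; all identifiers below are placeholders.
    AmazonEC2Client ec2 = new AmazonEC2Client(
            new BasicAWSCredentials("ACCESS_KEY", "SECRET_KEY"));

    // Blocks, re-polling every SLEEP_TIME_SECOND seconds, until every instance
    // reports Running; throws AmazonClientException if any instance is Terminated first.
    List<Instance> instances = AmazonEC2Util.startInstances(
            ec2, "ami-00000000", "t1.micro", jobExecutionContext, "airavata_gfac_key");

    List<String> instanceIds = AmazonEC2Util.getInstanceIDs(instances);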

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/EC2ProviderUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/EC2ProviderUtil.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/EC2ProviderUtil.java
deleted file mode 100644
index 4d7fab7..0000000
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/util/EC2ProviderUtil.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2.util;
-
-import com.amazonaws.AmazonClientException;
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.DeleteKeyPairRequest;
-import com.amazonaws.services.ec2.model.DescribeKeyPairsRequest;
-import com.amazonaws.services.ec2.model.ImportKeyPairRequest;
-import com.sshtools.j2ssh.util.Base64;
-
-import java.io.*;
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
-
-import org.bouncycastle.openssl.PEMWriter;
-
-/* This class holds the utility methods used by the EC2Provider. */
-public class EC2ProviderUtil {
-
-    /**
-     * Builds a key pair using the given AmazonEC2Client; the generated key pair will be
-     * named keyPairName.
-     *
-     * @param ec2 ec2client
-     * @param keyPairName name for the generated key pair
-     * @throws NoSuchAlgorithmException NoSuchAlgorithmException
-     * @throws InvalidKeySpecException InvalidKeySpecException
-     * @throws AmazonServiceException AmazonServiceException
-     * @throws AmazonClientException AmazonClientException
-     * @throws IOException IOException
-     */
-    public static void buildKeyPair(AmazonEC2Client ec2, String keyPairName)
-            throws NoSuchAlgorithmException, InvalidKeySpecException,
-            AmazonServiceException, AmazonClientException, IOException {
-        boolean newKey = false;
-
-        String privateKeyFilePath = System.getProperty("user.home") + "/.ssh/" + keyPairName;
-        File privateKeyFile = new File(privateKeyFilePath);
-        File publicKeyFile = new File(privateKeyFilePath + ".pub");
-
-        /* Check if Key-pair already created on the server */
-        if (!privateKeyFile.exists()) {
-
-            // check folder and create if it does not exist
-            File sshDir = new File(System.getProperty("user.home") + "/.ssh/");
-            if (!sshDir.exists())
-                sshDir.mkdir();
-
-            // Generate a 1024-bit RSA key pair
-            KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA");
-            keyGen.initialize(1024);
-            KeyPair keypair = keyGen.genKeyPair();
-
-            FileOutputStream fos = null;
-
-            // Store Public Key.
-            try {
-                fos = new FileOutputStream(privateKeyFilePath + ".pub");
-                fos.write(Base64.encodeBytes(keypair.getPublic().getEncoded(), true).getBytes());
-            } catch (IOException ioe) {
-                throw ioe;
-            } finally {
-                if (fos != null) {
-                    try {
-                        fos.close();
-                        fos = null;
-                    } catch (IOException ioe) {
-                        throw ioe;
-                    }
-                }
-            }
-
-            // Store Private Key.
-            try {
-                fos = new FileOutputStream(privateKeyFilePath);
-                StringWriter stringWriter = new StringWriter();
-
-                /* Write in PEM format (openssl support) */
-                PEMWriter pemFormatWriter = new PEMWriter(stringWriter);
-                pemFormatWriter.writeObject(keypair.getPrivate());
-                pemFormatWriter.close();
-                fos.write(stringWriter.toString().getBytes());
-            } catch (IOException ioe) {
-                throw ioe;
-            } finally {
-                if (fos != null) {
-                    try {
-                        fos.close();
-                        fos = null;
-                    } catch (IOException ioe) {
-                        throw ioe;
-                    }
-                }
-            }
-
-            privateKeyFile.setWritable(false, false);
-            privateKeyFile.setExecutable(false, false);
-            privateKeyFile.setReadable(false, false);
-            privateKeyFile.setReadable(true);
-            privateKeyFile.setWritable(true);
-
-            // set that this key is just created
-            newKey = true;
-        }
-
-        /* Read Public Key */
-        String encodedPublicKey = null;
-        BufferedReader br = null;
-        try {
-            br = new BufferedReader(new FileReader(publicKeyFile));
-            encodedPublicKey = br.readLine();
-        } catch (IOException ioe) {
-            throw ioe;
-        } finally {
-            if (br != null) {
-                try {
-                    br.close();
-                    br = null;
-                } catch (IOException ioe) {
-                    throw ioe;
-                }
-            }
-        }
-
-        /* Generate key pair in Amazon if necessary */
-        try {
-            /* Get current key pair in Amazon */
-            DescribeKeyPairsRequest describeKeyPairsRequest = new DescribeKeyPairsRequest();
-            ec2.describeKeyPairs(describeKeyPairsRequest.withKeyNames(keyPairName));
-
-            /* If the key exists and a new key was just created, delete the old key
-             * and replace it with the new one. Otherwise, do nothing. */
-            if (newKey) {
-                DeleteKeyPairRequest deleteKeyPairRequest = new DeleteKeyPairRequest(keyPairName);
-                ec2.deleteKeyPair(deleteKeyPairRequest);
-                ImportKeyPairRequest importKeyPairRequest = new ImportKeyPairRequest(keyPairName, encodedPublicKey);
-                ec2.importKeyPair(importKeyPairRequest);
-            }
-
-        } catch (AmazonServiceException ase) {
-            /* Key doesn't exist; import the new key. */
-            if (ase.getErrorCode().equals("InvalidKeyPair.NotFound")) {
-                ImportKeyPairRequest importKeyPairRequest = new ImportKeyPairRequest(keyPairName, encodedPublicKey);
-                ec2.importKeyPair(importKeyPairRequest);
-            } else {
-                throw ase;
-            }
-        }
-    }
-}
\ No newline at end of file
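A short, hypothetical call into the removed buildKeyPair helper: only the method signature and the on-disk behaviour described in its comments are taken from the file above; the key-pair name and credentials are placeholders.

    // Creates ~/.ssh/<name> (PEM private key) and ~/.ssh/<name>.pub if they do not
    // already exist, then imports the public key into EC2, replacing a stale key of
    // the same name whenever a fresh local key was generated.
    AmazonEC2Client ec2 = new AmazonEC2Client(
            new BasicAWSCredentials("ACCESS_KEY", "SECRET_KEY"));
    EC2ProviderUtil.buildKeyPair(ec2, "gfac_ec2_key");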

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/resources/errors.properties b/modules/gfac/gfac-ec2/src/main/resources/errors.properties
deleted file mode 100644
index 88c41b8..0000000
--- a/modules/gfac/gfac-ec2/src/main/resources/errors.properties
+++ /dev/null
@@ -1,197 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# Directly copied from jglobus. Not a good way to manage error properties.
-1 = Parameter not supported
-2 = The RSL length is greater than the maximum allowed
-3 = No resources available
-4 = Bad directory specified
-5 = The executable does not exist
-6 = Insufficient funds
-7 = Authentication with the remote server failed
-8 = Job cancelled by user
-9 = Job cancelled by system
-
-10 = Data transfer to the server failed
-11 = The stdin file does not exist
-12 = The connection to the server failed (check host and port)
-13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
-14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
-15 = The job manager received an invalid RSL
-16 = Could not connect to job manager
-17 = The job failed when the job manager attempted to run it
-18 = Paradyn error
-19 = The provided RSL 'jobtype' value is invalid
-
-20 = The provided RSL 'myjob' value is invalid
-21 = The job manager failed to locate an internal script argument file
-22 = The job manager failed to create an internal script argument file
-23 = The job manager detected an invalid job state
-24 = The job manager detected an invalid script response
-25 = The job manager detected an invalid job state
-26 = The provided RSL 'jobtype' value is not supported by this job manager
-27 = Unimplemented
-28 = The job manager failed to create an internal script submission file
-29 = The job manager cannot find the user proxy
-
-30 = The job manager failed to open the user proxy
-31 = The job manager failed to cancel the job as requested
-32 = System memory allocation failed
-33 = The interprocess job communication initialization failed
-34 = The interprocess job communication setup failed
-35 = The provided RSL 'host count' value is invalid
-36 = One of the provided RSL parameters is unsupported
-37 = The provided RSL 'queue' parameter is invalid
-38 = The provided RSL 'project' parameter is invalid
-39 = The provided RSL string includes variables that could not be identified
-
-40 = The provided RSL 'environment' parameter is invalid
-41 = The provided RSL 'dryrun' parameter is invalid
-42 = The provided RSL is invalid (an empty string)
-43 = The job manager failed to stage the executable
-44 = The job manager failed to stage the stdin file
-45 = The requested job manager type is invalid
-46 = The provided RSL 'arguments' parameter is invalid
-47 = The gatekeeper failed to run the job manager
-48 = The provided RSL could not be properly parsed
-49 = There is a version mismatch between GRAM components
-
-50 = The provided RSL 'arguments' parameter is invalid
-51 = The provided RSL 'count' parameter is invalid
-52 = The provided RSL 'directory' parameter is invalid
-53 = The provided RSL 'dryrun' parameter is invalid
-54 = The provided RSL 'environment' parameter is invalid
-55 = The provided RSL 'executable' parameter is invalid
-56 = The provided RSL 'host_count' parameter is invalid
-57 = The provided RSL 'jobtype' parameter is invalid
-58 = The provided RSL 'maxtime' parameter is invalid
-59 = The provided RSL 'myjob' parameter is invalid
-
-60 = The provided RSL 'paradyn' parameter is invalid
-61 = The provided RSL 'project' parameter is invalid
-62 = The provided RSL 'queue' parameter is invalid
-63 = The provided RSL 'stderr' parameter is invalid
-64 = The provided RSL 'stdin' parameter is invalid
-65 = The provided RSL 'stdout' parameter is invalid
-66 = The job manager failed to locate an internal script
-67 = The job manager failed on the system call pipe()
-68 = The job manager failed on the system call fcntl()
-69 = The job manager failed to create the temporary stdout filename
-
-70 = The job manager failed to create the temporary stderr filename
-71 = The job manager failed on the system call fork()
-72 = The executable file permissions do not allow execution
-73 = The job manager failed to open stdout
-74 = The job manager failed to open stderr
-75 = The cache file could not be opened in order to relocate the user proxy
-76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
-77 = The job manager failed to insert the contact in the client contact list
-78 = The contact was not found in the job manager's client contact list
-79 = Connecting to the job manager failed.  Possible reasons: job terminated, invalid job contact, network problems, ...
-
-80 = The syntax of the job contact is invalid
-81 = The executable parameter in the RSL is undefined
-82 = The job manager service is misconfigured.  condor arch undefined
-83 = The job manager service is misconfigured.  condor os undefined
-84 = The provided RSL 'min_memory' parameter is invalid
-85 = The provided RSL 'max_memory' parameter is invalid
-86 = The RSL 'min_memory' value is not zero or greater
-87 = The RSL 'max_memory' value is not zero or greater
-88 = The creation of a HTTP message failed
-89 = Parsing incoming HTTP message failed
-
-90 = The packing of information into a HTTP message failed
-91 = An incoming HTTP message did not contain the expected information
-92 = The job manager does not support the service that the client requested
-93 = The gatekeeper failed to find the requested service
-94 = The jobmanager does not accept any new requests (shutting down)
-95 = The client failed to close the listener associated with the callback URL
-96 = The gatekeeper contact cannot be parsed
-97 = The job manager could not find the 'poe' command
-98 = The job manager could not find the 'mpirun' command
-99 = The provided RSL 'start_time' parameter is invalid
-100 = The provided RSL 'reservation_handle' parameter is invalid
-
-101 = The provided RSL 'max_wall_time' parameter is invalid
-102 = The RSL 'max_wall_time' value is not zero or greater
-103 = The provided RSL 'max_cpu_time' parameter is invalid
-104 = The RSL 'max_cpu_time' value is not zero or greater
-105 = The job manager is misconfigured, a scheduler script is missing
-106 = The job manager is misconfigured, a scheduler script has invalid permissions
-107 = The job manager failed to signal the job
-108 = The job manager did not recognize/support the signal type
-109 = The job manager failed to get the job id from the local scheduler
-
-110 = The job manager is waiting for a commit signal
-111 = The job manager timed out while waiting for a commit signal
-112 = The provided RSL 'save_state' parameter is invalid
-113 = The provided RSL 'restart' parameter is invalid
-114 = The provided RSL 'two_phase' parameter is invalid
-115 = The RSL 'two_phase' value is not zero or greater
-116 = The provided RSL 'stdout_position' parameter is invalid
-117 = The RSL 'stdout_position' value is not zero or greater
-118 = The provided RSL 'stderr_position' parameter is invalid
-119 = The RSL 'stderr_position' value is not zero or greater
-
-120 = The job manager restart attempt failed
-121 = The job state file doesn't exist
-122 = Could not read the job state file
-123 = Could not write the job state file
-124 = The old job manager is still alive
-125 = The job manager state file TTL expired
-126 = It is unknown if the job was submitted
-127 = The provided RSL 'remote_io_url' parameter is invalid
-128 = Could not write the remote io url file
-129 = The standard output/error size is different
-
-130 = The job manager was sent a stop signal (job is still running)
-131 = The user proxy expired (job is still running)
-132 = The job was not submitted by original jobmanager
-133 = The job manager is not waiting for that commit signal
-134 = The provided RSL scheduler specific parameter is invalid
-135 = The job manager could not stage in a file
-136 = The scratch directory could not be created
-137 = The provided 'gass_cache' parameter is invalid
-138 = The RSL contains attributes which are not valid for job submission
-139 = The RSL contains attributes which are not valid for stdio update
-
-140 = The RSL contains attributes which are not valid for job restart
-141 = The provided RSL 'file_stage_in' parameter is invalid
-142 = The provided RSL 'file_stage_in_shared' parameter is invalid
-143 = The provided RSL 'file_stage_out' parameter is invalid
-144 = The provided RSL 'gass_cache' parameter is invalid
-145 = The provided RSL 'file_cleanup' parameter is invalid
-146 = The provided RSL 'scratch_dir' parameter is invalid
-147 = The provided scheduler-specific RSL parameter is invalid
-148 = A required RSL attribute was not defined in the RSL spec
-149 = The gass_cache attribute points to an invalid cache directory
-
-150 = The provided RSL 'save_state' parameter has an invalid value
-151 = The job manager could not open the RSL attribute validation file
-152 = The job manager could not read the RSL attribute validation file
-153 = The provided RSL 'proxy_timeout' is invalid
-154 = The RSL 'proxy_timeout' value is not greater than zero
-155 = The job manager could not stage out a file
-156 = The job contact string does not match any which the job manager is handling
-157 = Proxy delegation failed
-158 = The job manager could not lock the state lock file
-
-1000 = Failed to start up callback handler
-1003 = Job contact not set

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-ec2/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/resources/service.properties b/modules/gfac/gfac-ec2/src/main/resources/service.properties
deleted file mode 100644
index 8275a10..0000000
--- a/modules/gfac/gfac-ec2/src/main/resources/service.properties
+++ /dev/null
@@ -1,57 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-#
-# Class which implements the Scheduler interface. It will be used to determine a Provider.
-#
-scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
-
-#
-# Data Service Plugins classes
-#
-datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
-
-#
-# Pre execution Plugins classes. For example, GridFTP Input Staging
-#
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging 
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
-
-#
-# Post execution Plugins classes. For example, GridFTP Output Staging
-#
-postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
-postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
-
-#
-# SSH private key location. It will be used by SSHProvider
-#
-# ssh.key=/home/user/.ssh/id_rsa
-# ssh.keypass=
-# ssh.username=usernameAtHost
-
-#
-# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
-#
-# myproxy.server=myproxy.teragrid.org
-# myproxy.user=username
-# myproxy.pass=password
-# myproxy.life=3600
\ No newline at end of file