Posted to commits@airavata.apache.org by la...@apache.org on 2014/04/28 17:26:31 UTC

[1/7] git commit: separate out gfac-local from gfac-core

Repository: airavata
Updated Branches:
  refs/heads/temp [created] 9a7873094


separate out gfac-local from gfac-core


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/989a1fd5
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/989a1fd5
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/989a1fd5

Branch: refs/heads/temp
Commit: 989a1fd5d4e62135a16c87531c1698f8e3e65304
Parents: ec1d2b8
Author: Nipun Udara <y....@gmail.com>
Authored: Sat Apr 26 11:07:11 2014 +0530
Committer: Nipun Udara <y....@gmail.com>
Committed: Sat Apr 26 11:07:11 2014 +0530

----------------------------------------------------------------------
 modules/gfac/gfac-local/pom.xml                 |  89 +++++++
 .../handler/LocalDirectorySetupHandler.java     |  62 +++++
 .../gfac/provider/impl/LocalProvider.java       | 240 +++++++++++++++++++
 .../gfac/utils/InputStreamToFileWriter.java     |  68 ++++++
 .../apache/airavata/gfac/utils/InputUtils.java  |  39 +++
 .../airavata/gfac/utils/LocalProviderUtil.java  |  54 +++++
 .../src/main/resources/errors.properties        | 197 +++++++++++++++
 .../src/main/resources/service.properties       |  58 +++++
 .../gfac/services/impl/LocalProviderTest.java   | 150 ++++++++++++
 .../src/test/resources/PBSTemplate.xslt         |  73 ++++++
 .../src/test/resources/gfac-config.xml          |  90 +++++++
 .../src/test/resources/logging.properties       |  42 ++++
 12 files changed, 1162 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/pom.xml b/modules/gfac/gfac-local/pom.xml
new file mode 100644
index 0000000..580424b
--- /dev/null
+++ b/modules/gfac/gfac-local/pom.xml
@@ -0,0 +1,89 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file 
+    distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under 
+    the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may 
+    obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to 
+    in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
+    ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under 
+    the License. -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.airavata</groupId>
+        <artifactId>gfac</artifactId>
+        <version>0.12-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>airavata-gfac-local</artifactId>
+    <name>Airavata GFac Local implementation</name>
+    <description>GFac provider and handler implementation for running jobs on the local host.</description>
+    <url>http://airavata.apache.org/</url>
+
+    <dependencies>
+
+        <!-- Logging -->
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+
+        <!-- GFAC schemas -->
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-gfac-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <!-- Test -->
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>6.1.1</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>copy-dependencies</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>copy-dependencies</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>target/lib</outputDirectory>
+                            <overWriteReleases>false</overWriteReleases>
+                            <overWriteSnapshots>true</overWriteSnapshots>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/handler/LocalDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/handler/LocalDirectorySetupHandler.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/handler/LocalDirectorySetupHandler.java
new file mode 100644
index 0000000..8174a3d
--- /dev/null
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/handler/LocalDirectorySetupHandler.java
@@ -0,0 +1,62 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.handler;
+
+import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.HostDescriptionType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.Map;
+
+public class LocalDirectorySetupHandler implements GFacHandler{
+    private static final Logger log = LoggerFactory.getLogger(LocalDirectorySetupHandler.class);
+
+    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+        log.info("Invoking GramDirectorySetupHandler ...");
+        HostDescriptionType type = jobExecutionContext.getApplicationContext().getHostDescription().getType();
+        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
+        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
+        log.debug("working diectroy = " + app.getStaticWorkingDirectory());
+        log.debug("temp directory = " + app.getScratchWorkingDirectory());
+
+        makeFileSystemDir(app.getStaticWorkingDirectory(),jobExecutionContext);
+        makeFileSystemDir(app.getScratchWorkingDirectory(),jobExecutionContext);
+        makeFileSystemDir(app.getInputDataDirectory(),jobExecutionContext);
+        makeFileSystemDir(app.getOutputDataDirectory(),jobExecutionContext);
+    }
+    private void makeFileSystemDir(String dir, JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+           File f = new File(dir);
+           if (f.isDirectory()) {        // isDirectory() implies exists()
+               return;
+           } else if (!f.mkdirs()) {     // mkdirs() also creates missing parent directories
+               throw new GFacHandlerException("Cannot make directory " + dir);
+           }
+    }
+
+    public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
+
+    }
+}
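
A note on makeFileSystemDir() above: File.mkdir() creates only the leaf directory and fails when an intermediate directory is missing. A minimal sketch of a more robust equivalent using java.nio (assuming Java 7+ is acceptable here; the DirUtil class name is illustrative, not part of this patch):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public final class DirUtil {
        private DirUtil() {}

        // Creates dir and any missing parents; a no-op if it already exists.
        public static void ensureDirectory(String dir) throws IOException {
            Files.createDirectories(Paths.get(dir));
        }
    }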

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/provider/impl/LocalProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/provider/impl/LocalProvider.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/provider/impl/LocalProvider.java
new file mode 100644
index 0000000..a12bf5d
--- /dev/null
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/provider/impl/LocalProvider.java
@@ -0,0 +1,240 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.provider.impl;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.airavata.gfac.Constants;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.notification.events.StartExecutionEvent;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.gfac.provider.utils.ProviderUtils;
+import org.apache.airavata.gfac.utils.GFacUtils;
+import org.apache.airavata.gfac.utils.InputStreamToFileWriter;
+import org.apache.airavata.gfac.utils.InputUtils;
+import org.apache.airavata.gfac.utils.OutputUtils;
+import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+import org.apache.airavata.model.workspace.experiment.JobDetails;
+import org.apache.airavata.model.workspace.experiment.JobState;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.NameValuePairType;
+import org.apache.xmlbeans.XmlException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class LocalProvider extends AbstractProvider{
+    private static final Logger log = LoggerFactory.getLogger(LocalProvider.class);
+    private ProcessBuilder builder;
+    private List<String> cmdList;
+    private String jobId;
+    
+    public static class LocalProviderJobData{
+    	private String applicationName;
+    	private List<String> inputParameters;
+    	private String workingDir;
+    	private String inputDir;
+    	private String outputDir;
+		public String getApplicationName() {
+			return applicationName;
+		}
+		public void setApplicationName(String applicationName) {
+			this.applicationName = applicationName;
+		}
+		public List<String> getInputParameters() {
+			return inputParameters;
+		}
+		public void setInputParameters(List<String> inputParameters) {
+			this.inputParameters = inputParameters;
+		}
+		public String getWorkingDir() {
+			return workingDir;
+		}
+		public void setWorkingDir(String workingDir) {
+			this.workingDir = workingDir;
+		}
+		public String getInputDir() {
+			return inputDir;
+		}
+		public void setInputDir(String inputDir) {
+			this.inputDir = inputDir;
+		}
+		public String getOutputDir() {
+			return outputDir;
+		}
+		public void setOutputDir(String outputDir) {
+			this.outputDir = outputDir;
+		}
+    }
+    public LocalProvider(){
+        cmdList = new ArrayList<String>();
+    }
+
+    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
+    	super.initialize(jobExecutionContext);
+        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().
+                getApplicationDeploymentDescription().getType();
+
+        buildCommand(app.getExecutableLocation(), ProviderUtils.getInputParameters(jobExecutionContext));
+        initProcessBuilder(app);
+
+        // extra environment variables
+        builder.environment().put(Constants.INPUT_DATA_DIR_VAR_NAME, app.getInputDataDirectory());
+        builder.environment().put(Constants.OUTPUT_DATA_DIR_VAR_NAME, app.getOutputDataDirectory());
+
+        // set working directory
+        builder.directory(new File(app.getStaticWorkingDirectory()));
+
+        // log info
+        log.info("Command = " + InputUtils.buildCommand(cmdList));
+        log.info("Working dir = " + builder.directory());
+        for (String key : builder.environment().keySet()) {
+            log.info("Env[" + key + "] = " + builder.environment().get(key));
+        }
+    }
+
+    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+        jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
+         ApplicationDeploymentDescriptionType app = jobExecutionContext.
+                 getApplicationContext().getApplicationDeploymentDescription().getType();
+        JobDetails jobDetails = new JobDetails();
+        try {
+        	jobId= jobExecutionContext.getTaskData().getTaskID();
+            jobDetails.setJobID(jobId);
+            jobDetails.setJobDescription(app.toString());
+            jobExecutionContext.setJobDetails(jobDetails);
+            JobDescriptor jobDescriptor = GFacUtils.createJobDescriptor(jobExecutionContext, app, null);
+            jobDetails.setJobDescription(jobDescriptor.toXML());
+            GFacUtils.saveJobStatus(jobDetails, JobState.SETUP, jobExecutionContext.getTaskData().getTaskID());
+        	// running cmd
+            Process process = builder.start();
+
+            Thread standardOutWriter = new InputStreamToFileWriter(process.getInputStream(), app.getStandardOutput());
+            Thread standardErrorWriter = new InputStreamToFileWriter(process.getErrorStream(), app.getStandardError());
+
+            // start output threads
+            standardOutWriter.setDaemon(true);
+            standardErrorWriter.setDaemon(true);
+            standardOutWriter.start();
+            standardErrorWriter.start();
+
+            int returnValue = process.waitFor();
+
+            // make sure other two threads are done
+            standardOutWriter.join();
+            standardErrorWriter.join();
+
+            /*
+             * Check the return value. An exit code alone rarely explains a failure,
+             * so it is only recorded in the log rather than failing the job here.
+             */
+            if (returnValue != 0) {
+                log.error("Process finished with non zero return value. Process may have failed");
+            } else {
+                log.info("Process finished with return value of zero.");
+            }
+
+            StringBuffer buf = new StringBuffer();
+            buf.append("Executed ").append(InputUtils.buildCommand(cmdList))
+                    .append(" on the localHost, working directory = ").append(app.getStaticWorkingDirectory())
+                    .append(" tempDirectory = ").append(app.getScratchWorkingDirectory()).append(" With the status ")
+                    .append(String.valueOf(returnValue));
+            log.info(buf.toString());
+        } catch (IOException io) {
+            throw new GFacProviderException(io.getMessage(), io);
+        } catch (InterruptedException e) {
+            throw new GFacProviderException(e.getMessage(), e);
+        } catch (GFacException e) {
+            throw new GFacProviderException(e.getMessage(), e);
+        }
+    }
+
+//	private void saveApplicationJob(JobExecutionContext jobExecutionContext)
+//			throws GFacProviderException {
+//		ApplicationDeploymentDescriptionType app = jobExecutionContext.
+//                getApplicationContext().getApplicationDeploymentDescription().getType();
+//		ApplicationJob appJob = GFacUtils.createApplicationJob(jobExecutionContext);
+//		appJob.setJobId(jobId);
+//		LocalProviderJobData data = new LocalProviderJobData();
+//		data.setApplicationName(app.getExecutableLocation());
+//		data.setInputDir(app.getInputDataDirectory());
+//		data.setOutputDir(app.getOutputDataDirectory());
+//		data.setWorkingDir(builder.directory().toString());
+//		data.setInputParameters(ProviderUtils.getInputParameters(jobExecutionContext));
+//		ByteArrayOutputStream stream = new ByteArrayOutputStream();
+//		JAXB.marshal(data, stream);
+//		appJob.setJobData(stream.toString());
+//		appJob.setSubmittedTime(Calendar.getInstance().getTime());
+//		appJob.setStatus(ApplicationJobStatus.SUBMITTED);
+//		appJob.setStatusUpdateTime(appJob.getSubmittedTime());
+//		GFacUtils.recordApplicationJob(jobExecutionContext, appJob);
+//	}
+
+    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
+
+        try {
+            String stdOutStr = GFacUtils.readFileToString(app.getStandardOutput());
+            String stdErrStr = GFacUtils.readFileToString(app.getStandardError());
+			Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
+            OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr);
+        } catch (XmlException e) {
+            throw new GFacProviderException("Cannot read output:" + e.getMessage(), e);
+        } catch (IOException io) {
+            throw new GFacProviderException(io.getMessage(), io);
+        } catch (Exception e){
+        	throw new GFacProviderException("Error in retrieving results",e);
+        }
+    }
+
+    public void cancelJob(String jobId, JobExecutionContext jobExecutionContext) throws GFacException {
+        throw new UnsupportedOperationException("Cancelling a local job is not supported yet");
+    }
+
+
+    private void buildCommand(String executable, List<String> inputParameterList){
+        cmdList.add(executable);
+        cmdList.addAll(inputParameterList);
+    }
+
+    private void initProcessBuilder(ApplicationDeploymentDescriptionType app){
+        builder = new ProcessBuilder(cmdList);
+
+        NameValuePairType[] env = app.getApplicationEnvironmentArray();
+
+        if(env != null && env.length > 0){
+            Map<String,String> builderEnv = builder.environment();
+            for (NameValuePairType entry : env) {
+                builderEnv.put(entry.getName(), entry.getValue());
+            }
+        }
+    }
+
+    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
+
+    }
+}
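
The execute() method above is a standard ProcessBuilder sequence: build the argument list, start the process, drain stdout and stderr on separate threads so the child cannot block on a full pipe buffer, then waitFor(). A self-contained sketch of the same pattern outside GFac (the command, paths, and environment variable name are placeholders; InputStreamToFileWriter is the class added by this commit):

    import java.io.File;
    import java.util.Arrays;

    import org.apache.airavata.gfac.utils.InputStreamToFileWriter;

    public class LocalRunSketch {
        public static void main(String[] args) throws Exception {
            ProcessBuilder builder = new ProcessBuilder(Arrays.asList("/bin/echo", "hello"));
            builder.directory(new File(System.getProperty("java.io.tmpdir")));
            builder.environment().put("INPUT_DATA_DIR", "/tmp/input"); // placeholder

            Process process = builder.start();
            // drain both streams so the child cannot block on a full pipe buffer
            Thread out = new InputStreamToFileWriter(process.getInputStream(), "/tmp/echo.stdout");
            Thread err = new InputStreamToFileWriter(process.getErrorStream(), "/tmp/echo.stderr");
            out.start();
            err.start();

            int rc = process.waitFor(); // block until the child exits
            out.join();                 // make sure both writers have flushed
            err.join();
            System.out.println("exit code = " + rc);
        }
    }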

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/InputStreamToFileWriter.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/InputStreamToFileWriter.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/InputStreamToFileWriter.java
new file mode 100644
index 0000000..691e11d
--- /dev/null
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/InputStreamToFileWriter.java
@@ -0,0 +1,68 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.utils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+
+public class InputStreamToFileWriter extends Thread{
+    protected final Logger log = LoggerFactory.getLogger(this.getClass());
+
+    private BufferedReader in;
+    private BufferedWriter out;
+
+    public InputStreamToFileWriter(InputStream in, String out) throws IOException {
+        this.in = new BufferedReader(new InputStreamReader(in));
+        this.out = new BufferedWriter(new FileWriter(out));
+    }
+
+    public void run() {
+        try {
+            String line = null;
+            while ((line = in.readLine()) != null) {
+                if (log.isDebugEnabled()) {
+                    log.debug(line);
+                }
+                out.write(line);
+                out.newLine();
+            }
+        } catch (Exception e) {
+            log.error("Error while writing the process stream to file", e);
+        } finally {
+            if (in != null) {
+                try {
+                    in.close();
+                } catch (Exception e) {
+                    log.warn("Failed to close the process stream reader", e);
+                }
+            }
+            if (out != null) {
+                try {
+                    out.close();
+                } catch (Exception e) {
+                    log.warn("Failed to close the output file writer", e);
+                }
+            }
+        }
+    }
+}
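
On Java 7 and later the writer threads above can be avoided entirely, because ProcessBuilder can redirect the child's streams straight to files. A minimal sketch (file paths are placeholders):

    import java.io.File;

    public class RedirectSketch {
        public static void main(String[] args) throws Exception {
            ProcessBuilder builder = new ProcessBuilder("/bin/echo", "hello");
            // The OS wires the child's stdout/stderr to these files; no gobbler threads needed.
            builder.redirectOutput(new File("/tmp/echo.stdout"));
            builder.redirectError(new File("/tmp/echo.stderr"));
            int rc = builder.start().waitFor();
            System.out.println("exit code = " + rc);
        }
    }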

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/InputUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/InputUtils.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/InputUtils.java
new file mode 100644
index 0000000..dedfa29
--- /dev/null
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/InputUtils.java
@@ -0,0 +1,39 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.utils;
+
+import java.util.List;
+
+public class InputUtils {
+    private static final String SPACE = " ";
+
+    private InputUtils() {
+    }
+
+    public static String buildCommand(List<String> cmdList) {
+        StringBuffer buff = new StringBuffer();
+        for (String string : cmdList) {
+            buff.append(string);
+            buff.append(SPACE);
+        }
+        return buff.toString();
+    }
+}
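
buildCommand() space-joins its arguments (leaving a trailing space), which is adequate for the log lines it feeds but unsafe to re-parse, since arguments containing spaces are not quoted. Usage:

    import java.util.Arrays;

    import org.apache.airavata.gfac.utils.InputUtils;

    public class BuildCommandExample {
        public static void main(String[] args) {
            // Prints "/bin/echo hello world " (note the trailing space). Display only;
            // never split this string back into arguments.
            System.out.println(InputUtils.buildCommand(Arrays.asList("/bin/echo", "hello", "world")));
        }
    }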

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/LocalProviderUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/LocalProviderUtil.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/LocalProviderUtil.java
new file mode 100644
index 0000000..c80eeda
--- /dev/null
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/utils/LocalProviderUtil.java
@@ -0,0 +1,54 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.utils;
+
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+
+public class LocalProviderUtil {
+    private static final Logger log = LoggerFactory.getLogger(LocalProviderUtil.class);
+
+    private void makeFileSystemDir(String dir) throws GFacProviderException {
+        File f = new File(dir);
+        if (f.isDirectory()) {          // isDirectory() implies exists()
+            return;
+        } else if (!f.mkdirs()) {       // mkdirs() also creates missing parent directories
+            throw new GFacProviderException("Cannot make directory " + dir);
+        }
+    }
+
+    public void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+        ApplicationDeploymentDescriptionType app = jobExecutionContext.
+                getApplicationContext().getApplicationDeploymentDescription().getType();
+        log.info("working diectroy = " + app.getStaticWorkingDirectory());
+        log.info("temp directory = " + app.getScratchWorkingDirectory());
+        makeFileSystemDir(app.getStaticWorkingDirectory());
+        makeFileSystemDir(app.getScratchWorkingDirectory());
+        makeFileSystemDir(app.getInputDataDirectory());
+        makeFileSystemDir(app.getOutputDataDirectory());
+    }
+
+}
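
LocalProviderUtil.makeFileSystemDir() duplicates the logic in LocalDirectorySetupHandler. One possible consolidation, sketched with an illustrative helper name that is not part of this patch:

    import java.io.File;
    import java.io.IOException;

    public final class DirectoryHelper {
        private DirectoryHelper() {}

        // Shared by the handler and the provider util; throws on failure.
        public static void makeFileSystemDir(String dir) throws IOException {
            File f = new File(dir);
            if (f.isDirectory()) {
                return;             // already present
            }
            if (!f.mkdirs()) {      // mkdirs() also creates missing parents
                throw new IOException("Cannot make directory " + dir);
            }
        }
    }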

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/resources/errors.properties b/modules/gfac/gfac-local/src/main/resources/errors.properties
new file mode 100644
index 0000000..88c41b8
--- /dev/null
+++ b/modules/gfac/gfac-local/src/main/resources/errors.properties
@@ -0,0 +1,197 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Directly copied from jglobus. Not a good way to manage error properties.
+1 = Parameter not supported
+2 = The RSL length is greater than the maximum allowed
+3 = No resources available
+4 = Bad directory specified
+5 = The executable does not exist
+6 = Insufficient funds
+7 = Authentication with the remote server failed
+8 = Job cancelled by user
+9 = Job cancelled by system
+
+10 = Data transfer to the server failed
+11 = The stdin file does not exist
+12 = The connection to the server failed (check host and port)
+13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
+14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
+15 = The job manager received an invalid RSL
+16 = Could not connect to job manager
+17 = The job failed when the job manager attempted to run it
+18 = Paradyn error
+19 = The provided RSL 'jobtype' value is invalid
+
+20 = The provided RSL 'myjob' value is invalid
+21 = The job manager failed to locate an internal script argument file
+22 = The job manager failed to create an internal script argument file
+23 = The job manager detected an invalid job state
+24 = The job manager detected an invalid script response
+25 = The job manager detected an invalid job state
+26 = The provided RSL 'jobtype' value is not supported by this job manager
+27 = Unimplemented
+28 = The job manager failed to create an internal script submission file
+29 = The job manager cannot find the user proxy
+
+30 = The job manager failed to open the user proxy
+31 = The job manager failed to cancel the job as requested
+32 = System memory allocation failed
+33 = The interprocess job communication initialization failed
+34 = The interprocess job communication setup failed
+35 = The provided RSL 'host count' value is invalid
+36 = One of the provided RSL parameters is unsupported
+37 = The provided RSL 'queue' parameter is invalid
+38 = The provided RSL 'project' parameter is invalid
+39 = The provided RSL string includes variables that could not be identified
+
+40 = The provided RSL 'environment' parameter is invalid
+41 = The provided RSL 'dryrun' parameter is invalid
+42 = The provided RSL is invalid (an empty string)
+43 = The job manager failed to stage the executable
+44 = The job manager failed to stage the stdin file
+45 = The requested job manager type is invalid
+46 = The provided RSL 'arguments' parameter is invalid
+47 = The gatekeeper failed to run the job manager
+48 = The provided RSL could not be properly parsed
+49 = There is a version mismatch between GRAM components
+
+50 = The provided RSL 'arguments' parameter is invalid
+51 = The provided RSL 'count' parameter is invalid
+52 = The provided RSL 'directory' parameter is invalid
+53 = The provided RSL 'dryrun' parameter is invalid
+54 = The provided RSL 'environment' parameter is invalid
+55 = The provided RSL 'executable' parameter is invalid
+56 = The provided RSL 'host_count' parameter is invalid
+57 = The provided RSL 'jobtype' parameter is invalid
+58 = The provided RSL 'maxtime' parameter is invalid
+59 = The provided RSL 'myjob' parameter is invalid
+
+60 = The provided RSL 'paradyn' parameter is invalid
+61 = The provided RSL 'project' parameter is invalid
+62 = The provided RSL 'queue' parameter is invalid
+63 = The provided RSL 'stderr' parameter is invalid
+64 = The provided RSL 'stdin' parameter is invalid
+65 = The provided RSL 'stdout' parameter is invalid
+66 = The job manager failed to locate an internal script
+67 = The job manager failed on the system call pipe()
+68 = The job manager failed on the system call fcntl()
+69 = The job manager failed to create the temporary stdout filename
+
+70 = The job manager failed to create the temporary stderr filename
+71 = The job manager failed on the system call fork()
+72 = The executable file permissions do not allow execution
+73 = The job manager failed to open stdout
+74 = The job manager failed to open stderr
+75 = The cache file could not be opened in order to relocate the user proxy
+76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
+77 = The job manager failed to insert the contact in the client contact list
+78 = The contact was not found in the job manager's client contact list
+79 = Connecting to the job manager failed.  Possible reasons: job terminated, invalid job contact, network problems, ...
+
+80 = The syntax of the job contact is invalid
+81 = The executable parameter in the RSL is undefined
+82 = The job manager service is misconfigured.  condor arch undefined
+83 = The job manager service is misconfigured.  condor os undefined
+84 = The provided RSL 'min_memory' parameter is invalid
+85 = The provided RSL 'max_memory' parameter is invalid
+86 = The RSL 'min_memory' value is not zero or greater
+87 = The RSL 'max_memory' value is not zero or greater
+88 = The creation of a HTTP message failed
+89 = Parsing incoming HTTP message failed
+
+90 = The packing of information into a HTTP message failed
+91 = An incoming HTTP message did not contain the expected information
+92 = The job manager does not support the service that the client requested
+93 = The gatekeeper failed to find the requested service
+94 = The jobmanager does not accept any new requests (shutting down)
+95 = The client failed to close the listener associated with the callback URL
+96 = The gatekeeper contact cannot be parsed
+97 = The job manager could not find the 'poe' command
+98 = The job manager could not find the 'mpirun' command
+99 = The provided RSL 'start_time' parameter is invalid
+100 = The provided RSL 'reservation_handle' parameter is invalid
+
+101 = The provided RSL 'max_wall_time' parameter is invalid
+102 = The RSL 'max_wall_time' value is not zero or greater
+103 = The provided RSL 'max_cpu_time' parameter is invalid
+104 = The RSL 'max_cpu_time' value is not zero or greater
+105 = The job manager is misconfigured, a scheduler script is missing
+106 = The job manager is misconfigured, a scheduler script has invalid permissions
+107 = The job manager failed to signal the job
+108 = The job manager did not recognize/support the signal type
+109 = The job manager failed to get the job id from the local scheduler
+
+110 = The job manager is waiting for a commit signal
+111 = The job manager timed out while waiting for a commit signal
+112 = The provided RSL 'save_state' parameter is invalid
+113 = The provided RSL 'restart' parameter is invalid
+114 = The provided RSL 'two_phase' parameter is invalid
+115 = The RSL 'two_phase' value is not zero or greater
+116 = The provided RSL 'stdout_position' parameter is invalid
+117 = The RSL 'stdout_position' value is not zero or greater
+118 = The provided RSL 'stderr_position' parameter is invalid
+119 = The RSL 'stderr_position' value is not zero or greater
+
+120 = The job manager restart attempt failed
+121 = The job state file doesn't exist
+122 = Could not read the job state file
+123 = Could not write the job state file
+124 = The old job manager is still alive
+125 = The job manager state file TTL expired
+126 = It is unknown if the job was submitted
+127 = The provided RSL 'remote_io_url' parameter is invalid
+128 = Could not write the remote io url file
+129 = The standard output/error size is different
+
+130 = The job manager was sent a stop signal (job is still running)
+131 = The user proxy expired (job is still running)
+132 = The job was not submitted by original jobmanager
+133 = The job manager is not waiting for that commit signal
+134 = The provided RSL scheduler specific parameter is invalid
+135 = The job manager could not stage in a file
+136 = The scratch directory could not be created
+137 = The provided 'gass_cache' parameter is invalid
+138 = The RSL contains attributes which are not valid for job submission
+139 = The RSL contains attributes which are not valid for stdio update
+
+140 = The RSL contains attributes which are not valid for job restart
+141 = The provided RSL 'file_stage_in' parameter is invalid
+142 = The provided RSL 'file_stage_in_shared' parameter is invalid
+143 = The provided RSL 'file_stage_out' parameter is invalid
+144 = The provided RSL 'gass_cache' parameter is invalid
+145 = The provided RSL 'file_cleanup' parameter is invalid
+146 = The provided RSL 'scratch_dir' parameter is invalid
+147 = The provided scheduler-specific RSL parameter is invalid
+148 = A required RSL attribute was not defined in the RSL spec
+149 = The gass_cache attribute points to an invalid cache directory
+
+150 = The provided RSL 'save_state' parameter has an invalid value
+151 = The job manager could not open the RSL attribute validation file
+152 = The job manager could not read the RSL attribute validation file
+153 = The provided RSL 'proxy_timeout' is invalid
+154 = The RSL 'proxy_timeout' value is not greater than zero
+155 = The job manager could not stage out a file
+156 = The job contact string does not match any which the job manager is handling
+157 = Proxy delegation failed
+158 = The job manager could not lock the state lock file
+
+1000 = Failed to start up callback handler
+1003 = Job contact not set
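
At runtime these codes map directly onto java.util.Properties. A minimal lookup sketch (assuming the file is packaged at the jar root, as the src/main/resources layout above implies; the GramErrorLookup class name is illustrative):

    import java.io.InputStream;
    import java.util.Properties;

    public class GramErrorLookup {
        public static void main(String[] args) throws Exception {
            Properties errors = new Properties();
            try (InputStream in = GramErrorLookup.class.getResourceAsStream("/errors.properties")) {
                errors.load(in);
            }
            // Prints "The executable does not exist"
            System.out.println(errors.getProperty("5", "unknown error code"));
        }
    }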

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/resources/service.properties b/modules/gfac/gfac-local/src/main/resources/service.properties
new file mode 100644
index 0000000..391bfea
--- /dev/null
+++ b/modules/gfac/gfac-local/src/main/resources/service.properties
@@ -0,0 +1,58 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+
+#
+# Class which implements the Scheduler interface. It is used to determine a Provider.
+#
+scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
+
+#
+# Data Service Plugins classes
+#
+datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
+
+#
+# Pre execution Plugins classes. For example, GridFTP Input Staging
+#
+prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging 
+prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
+
+#
+# Post execution Plugins classes. For example, GridFTP Output Staging
+#
+postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
+postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
+
+#
+# SSH private key location. It will be used by SSHProvider
+#
+# ssh.key=/home/user/.ssh/id_rsa
+# ssh.keypass=
+# ssh.username=usernameAtHost
+
+#
+# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
+#
+# myproxy.server=myproxy.teragrid.org
+# myproxy.user=username
+# myproxy.pass=password
+# myproxy.life=3600
\ No newline at end of file
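
Note that prechain.classes and postchain.classes are each assigned twice above. If this file is read with plain java.util.Properties, the second assignment silently replaces the first, so only HttpInputStaging and OutputRegister would be seen unless the loader merges duplicate keys itself. A small demonstration of that overwrite behaviour:

    import java.io.StringReader;
    import java.util.Properties;

    public class DuplicateKeyDemo {
        public static void main(String[] args) throws Exception {
            Properties p = new Properties();
            p.load(new StringReader("k=first\nk=second\n"));
            System.out.println(p.getProperty("k")); // prints "second": the last value wins
        }
    }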

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java b/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
new file mode 100644
index 0000000..c33a8bc
--- /dev/null
+++ b/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
@@ -0,0 +1,150 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.core.gfac.services.impl;
+
+import org.apache.airavata.commons.gfac.type.*;
+import org.apache.airavata.gfac.GFacConfiguration;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.ApplicationContext;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.gfac.cpi.GFacImpl;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.InputParameterType;
+import org.apache.airavata.schemas.gfac.OutputParameterType;
+import org.apache.airavata.schemas.gfac.StringParameterType;
+import org.apache.commons.lang.SystemUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+public class LocalProviderTest {
+    private JobExecutionContext jobExecutionContext;
+    @Before
+    public void setUp() throws Exception {
+
+        URL resource = this.getClass().getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+        File configFile = new File(resource.getPath());
+        GFacConfiguration gFacConfiguration = GFacConfiguration.create(configFile, null, null);
+        // have to set InFlow handlers and OutFlow handlers
+        ApplicationContext applicationContext = new ApplicationContext();
+        HostDescription host = new HostDescription();
+        host.getType().setHostName("localhost");
+        host.getType().setHostAddress("localhost");
+        applicationContext.setHostDescription(host);
+        /*
+         * App
+         */
+        ApplicationDescription appDesc = new ApplicationDescription();
+        ApplicationDeploymentDescriptionType app = appDesc.getType();
+        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+        name.setStringValue("EchoLocal");
+        app.setApplicationName(name);
+
+        /*
+         * Use bat file if it is compiled on Windows
+         */
+        if (SystemUtils.IS_OS_WINDOWS) {
+            URL url = this.getClass().getClassLoader().getResource("echo.bat");
+            app.setExecutableLocation(url.getFile());
+        } else {
+            //for unix and Mac
+            app.setExecutableLocation("/bin/echo");
+        }
+
+        /*
+         * Default tmp location
+         */
+        String tempDir = System.getProperty("java.io.tmpdir");
+        if (tempDir == null) {
+            tempDir = "/tmp";
+        }
+
+        app.setScratchWorkingDirectory(tempDir);
+        app.setStaticWorkingDirectory(tempDir);
+        app.setInputDataDirectory(tempDir + File.separator + "input");
+        app.setOutputDataDirectory(tempDir + File.separator + "output");
+        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
+        app.setStandardError(tempDir + File.separator + "echo.stderr");
+
+        applicationContext.setApplicationDeploymentDescription(appDesc);
+
+        /*
+         * Service
+         */
+        ServiceDescription serv = new ServiceDescription();
+        serv.getType().setName("SimpleEcho");
+
+        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+        InputParameterType input = InputParameterType.Factory.newInstance();
+        input.setParameterName("echo_input");
+        input.setParameterType(StringParameterType.Factory.newInstance());
+        inputList.add(input);
+        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+                .size()]);
+
+        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+        OutputParameterType output = OutputParameterType.Factory.newInstance();
+        output.setParameterName("echo_output");
+        output.setParameterType(StringParameterType.Factory.newInstance());
+        outputList.add(output);
+        OutputParameterType[] outputParamList = outputList
+                .toArray(new OutputParameterType[outputList.size()]);
+
+        serv.getType().setInputParametersArray(inputParamList);
+        serv.getType().setOutputParametersArray(outputParamList);
+
+        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
+        jobExecutionContext.setApplicationContext(applicationContext);
+        applicationContext.setServiceDescription(serv);
+
+        MessageContext inMessage = new MessageContext();
+        ActualParameter echo_input = new ActualParameter();
+        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
+        inMessage.addParameter("echo_input", echo_input);
+
+        jobExecutionContext.setInMessageContext(inMessage);
+
+        MessageContext outMessage = new MessageContext();
+        ActualParameter echo_out = new ActualParameter();
+//		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
+        outMessage.addParameter("echo_output", echo_out);
+
+        jobExecutionContext.setOutMessageContext(outMessage);
+
+    }
+
+    @Test
+    public void testLocalProvider() throws GFacException {
+        GFacImpl gFacAPI = new GFacImpl();
+        gFacAPI.submitJob(jobExecutionContext);
+        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
+        Assert.assertEquals("hello", MappingFactory.toString((ActualParameter) outMessageContext.getParameter("echo_output")));
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/test/resources/PBSTemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/test/resources/PBSTemplate.xslt b/modules/gfac/gfac-local/src/test/resources/PBSTemplate.xslt
new file mode 100644
index 0000000..e749e9c
--- /dev/null
+++ b/modules/gfac/gfac-local/src/test/resources/PBSTemplate.xslt
@@ -0,0 +1,73 @@
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
+	distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
+	the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
+	obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
+	in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+	ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
+	the License. -->
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
+<xsl:output method="text" />
+<xsl:template match="/ns:JobDescriptor">
+#! /bin/sh
+# PBS batch job script built by Globus job manager
+#   <xsl:choose>
+    <xsl:when test="ns:shellName">
+##PBS -S <xsl:value-of select="ns:shellName"/>
+    </xsl:when></xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:queueName">
+#PBS -q <xsl:value-of select="ns:queueName"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:mailOptions">
+#PBS -m <xsl:value-of select="ns:mailOptions"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+<xsl:when test="ns:acountString">
+#PBS -A <xsl:value-of select="ns:acountString"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:maxWallTime">
+#PBS -l walltime=<xsl:value-of select="ns:maxWallTime"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:standardOutFile">
+#PBS -o <xsl:value-of select="ns:standardOutFile"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:standardOutFile">
+#PBS -e <xsl:value-of select="ns:standardErrorFile"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
+#PBS -l nodes=<xsl:value-of select="ns:nodes"/>:ppn=<xsl:value-of select="ns:processesPerNode"/>
+<xsl:text>&#xa;</xsl:text>
+    </xsl:when>
+    </xsl:choose>
+<xsl:for-each select="ns:exports/ns:name">
+<xsl:value-of select="."/>=<xsl:value-of select="./@value"/><xsl:text>&#xa;</xsl:text>
+export<xsl:text>   </xsl:text><xsl:value-of select="."/>
+<xsl:text>&#xa;</xsl:text>
+</xsl:for-each>
+<xsl:for-each select="ns:preJobCommands/ns:command">
+      <xsl:value-of select="."/><xsl:text>   </xsl:text>
+    </xsl:for-each>
+cd <xsl:text>   </xsl:text><xsl:value-of select="ns:workingDirectory"/><xsl:text>&#xa;</xsl:text>
+    <xsl:choose><xsl:when test="ns:jobSubmitterCommand">
+<xsl:value-of select="ns:jobSubmitterCommand"/><xsl:text>   </xsl:text></xsl:when></xsl:choose><xsl:value-of select="ns:executablePath"/><xsl:text>   </xsl:text>
+<xsl:for-each select="ns:inputs/ns:input">
+      <xsl:value-of select="."/><xsl:text>   </xsl:text>
+    </xsl:for-each>
+<xsl:for-each select="ns:postJobCommands/ns:command">
+      <xsl:value-of select="."/><xsl:text>   </xsl:text>
+</xsl:for-each>
+
+</xsl:template>
+
+</xsl:stylesheet>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/test/resources/gfac-config.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/test/resources/gfac-config.xml b/modules/gfac/gfac-local/src/test/resources/gfac-config.xml
new file mode 100644
index 0000000..61dca4f
--- /dev/null
+++ b/modules/gfac/gfac-local/src/test/resources/gfac-config.xml
@@ -0,0 +1,90 @@
+<!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
+    contributor license agreements. See the NOTICE file ~ distributed with this
+    work for additional information ~ regarding copyright ownership. The ASF
+    licenses this file ~ to you under the Apache License, Version 2.0 (the ~
+    "License"); you may not use this file except in compliance ~ with the License.
+    You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
+    ~ ~ Unless required by applicable law or agreed to in writing, ~ software
+    distributed under the License is distributed on an ~ "AS IS" BASIS, WITHOUT
+    WARRANTIES OR CONDITIONS OF ANY ~ KIND, either express or implied. See the
+    License for the ~ specific language governing permissions and limitations
+    ~ under the License. -->
+    
+<GFac>
+    <GlobalHandlers>
+        <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.AppDescriptorCheckHandler">
+                    <property name="name" value="value"/>
+            </Handler>
+        </InHandlers>
+        <OutHandlers></OutHandlers>
+    </GlobalHandlers>
+    <Provider class="org.apache.airavata.gfac.provider.impl.LocalProvider" host="org.apache.airavata.schemas.gfac.impl.HostDescriptionTypeImpl">
+        <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.LocalDirectorySetupHandler"/>
+        </InHandlers>
+    </Provider>
+    <Provider class="org.apache.airavata.gfac.provider.impl.GramProvider" host="org.apache.airavata.schemas.gfac.impl.GlobusHostTypeImpl">
+        <property name="name" value="value"/>
+        <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GramDirectorySetupHandler">
+                    <property name="name" value="value"/>
+            </Handler>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPOutputHandler"/>
+        </OutHandlers>
+    </Provider>
+      <Provider class="org.apache.airavata.gfac.provider.impl.BESProvider" host="org.apache.airavata.schemas.gfac.impl.UnicoreHostTypeImpl">
+        <InHandlers>
+        	<Handler class="org.apache.airavata.gfac.handler.GramDirectorySetupHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPOutputHandler"/>
+        </OutHandlers>
+    </Provider>
+
+    <Provider class="org.apache.airavata.gfac.ec2.EC2Provider" host="org.apache.airavata.schemas.gfac.impl.Ec2HostTypeImpl">
+        <InHandlers/>
+        <OutHandlers/>
+    </Provider>
+
+    <Provider class="org.apache.airavata.gfac.provider.impl.HadoopProvider" host="org.apache.airavata.schemas.gfac.impl.HadoopHostTypeImpl">
+        <InHandlers>
+        	<Handler class="org.apache.airavata.gfac.handler.HadoopDeploymentHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.HDFSDataMovementHandler"/>
+        </InHandlers>
+        <OutHandlers/>
+    </Provider>
+
+    <Application name="UltraScan">
+        <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GramDirectorySetupHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPOutputHandler"/>
+        </OutHandlers>
+    </Application>
+
+     <Provider class="org.apache.airavata.gfac.provider.impl.SSHProvider" host="org.apache.airavata.schemas.gfac.impl.SSHHostTypeImpl">
+         <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.SCPDirectorySetupHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.SCPInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.SCPOutputHandler"/>
+        </OutHandlers>
+    </Provider>
+     <Provider class="org.apache.airavata.gfac.provider.impl.GSISSHProvider" host="org.apache.airavata.schemas.gfac.impl.GsisshHostTypeImpl">
+         <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.SCPDirectorySetupHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.SCPInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.SCPOutputHandler"/>
+        </OutHandlers>
+    </Provider>
+</GFac>
\ No newline at end of file
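
For readers of this configuration file: each <Provider> element binds a provider implementation to the host-description type it serves, the nested <InHandlers>/<OutHandlers> lists name the GFacHandler classes that run before and after execution, and the <Application name="UltraScan"> block appears to scope a handler chain to one named application. A minimal sketch of a conforming handler follows; the NoOpHandler class itself is hypothetical, but its two method signatures match the GFacHandler implementations elsewhere in this patch.

package org.apache.airavata.gfac.handler;

import java.util.Map;

import org.apache.airavata.gfac.GFacException;
import org.apache.airavata.gfac.context.JobExecutionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical no-op handler, compiled against gfac-core; it shows the
// shape a class must have before it can be listed in an <InHandlers> or
// <OutHandlers> chain like the ones above.
public class NoOpHandler implements GFacHandler {
    private static final Logger log = LoggerFactory.getLogger(NoOpHandler.class);

    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
        // A real handler would stage directories or move data here.
        log.info("Invoking NoOpHandler ...");
    }

    public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
        // Presumably receives the <property name="..." value="..."/> pairs
        // declared on the Handler element; this sketch ignores them.
    }
}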

http://git-wip-us.apache.org/repos/asf/airavata/blob/989a1fd5/modules/gfac/gfac-local/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/test/resources/logging.properties b/modules/gfac/gfac-local/src/test/resources/logging.properties
new file mode 100644
index 0000000..0584d38
--- /dev/null
+++ b/modules/gfac/gfac-local/src/test/resources/logging.properties
@@ -0,0 +1,42 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#default/fallback log4j configuration
+#
+
+# Set root logger level to WARN and its only appender to A1.
+log4j.rootLogger=INFO, A1, A2
+
+# A1 is set to be a rolling file appender with default params
+log4j.appender.A1=org.apache.log4j.RollingFileAppender
+log4j.appender.A1.File=target/seclogs.txt
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
+
+# A2 is a console appender
+log4j.appender.A2=org.apache.log4j.ConsoleAppender
+
+# A2 uses PatternLayout.
+log4j.appender.A2.layout=org.apache.log4j.PatternLayout
+log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
+
+log4j.logger.unicore.security=INFO
+


[5/7] git commit: separate gfac-local from gfac-core

Posted by la...@apache.org.
separate gfac-local from gfac-core


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/1852d792
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/1852d792
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/1852d792

Branch: refs/heads/temp
Commit: 1852d7923251a93db5b40d4bc33d08c297fb92c1
Parents: 761b81e
Author: Nipun Udara <y....@gmail.com>
Authored: Sun Apr 27 06:17:46 2014 +0530
Committer: Nipun Udara <y....@gmail.com>
Committed: Sun Apr 27 06:17:46 2014 +0530

----------------------------------------------------------------------
 modules/distribution/server/pom.xml | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/1852d792/modules/distribution/server/pom.xml
----------------------------------------------------------------------
diff --git a/modules/distribution/server/pom.xml b/modules/distribution/server/pom.xml
index 6a1c14e..fed3646 100644
--- a/modules/distribution/server/pom.xml
+++ b/modules/distribution/server/pom.xml
@@ -306,6 +306,11 @@
         </dependency>
         <dependency>
             <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-gfac-local</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
             <artifactId>airavata-gfac-core</artifactId>
             <version>${project.version}</version>
         </dependency>


[3/7] git commit: separate gfac-local from gfac-core

Posted by la...@apache.org.
separate gfac-local from gfac-core


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/216e2d6c
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/216e2d6c
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/216e2d6c

Branch: refs/heads/temp
Commit: 216e2d6ce2022304049df35f2d287dcaa35ecdab
Parents: cb0db78
Author: Nipun Udara <y....@gmail.com>
Authored: Sat Apr 26 15:28:00 2014 +0530
Committer: Nipun Udara <y....@gmail.com>
Committed: Sat Apr 26 15:28:00 2014 +0530

----------------------------------------------------------------------
 modules/gfac/gfac-local/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/216e2d6c/modules/gfac/gfac-local/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/pom.xml b/modules/gfac/gfac-local/pom.xml
index 580424b..9310f22 100644
--- a/modules/gfac/gfac-local/pom.xml
+++ b/modules/gfac/gfac-local/pom.xml
@@ -20,7 +20,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>airavata-gfac-local</artifactId>
     <name>Airavata GFac Local implementation</name>
-    <description>The core GFAC functionality independent from any webservice implementation.</description>
+    <description>This is the extension of GFAC Local.</description>
     <url>http://airavata.apache.org/</url>
 
     <dependencies>


[2/7] git commit: Separate out gfac-local from gfac-core

Posted by la...@apache.org.
Separate out gfac-local from gfac-core


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/cb0db780
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/cb0db780
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/cb0db780

Branch: refs/heads/temp
Commit: cb0db780efa246a9cd4756223e41659823e1303a
Parents: 989a1fd
Author: Nipun Udara <y....@gmail.com>
Authored: Sat Apr 26 11:15:04 2014 +0530
Committer: Nipun Udara <y....@gmail.com>
Committed: Sat Apr 26 11:15:04 2014 +0530

----------------------------------------------------------------------
 .../server/src/main/assembly/bin-assembly.xml   |   1 +
 .../handler/LocalDirectorySetupHandler.java     |  62 -----
 .../gfac/provider/impl/LocalProvider.java       | 240 -------------------
 .../gfac/utils/InputStreamToFileWriter.java     |  68 ------
 .../apache/airavata/gfac/utils/InputUtils.java  |  39 ---
 .../airavata/gfac/utils/LocalProviderUtil.java  |  54 -----
 .../gfac/services/impl/LocalProviderTest.java   | 150 ------------
 modules/gfac/pom.xml                            |   1 +
 8 files changed, 2 insertions(+), 613 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/cb0db780/modules/distribution/server/src/main/assembly/bin-assembly.xml
----------------------------------------------------------------------
diff --git a/modules/distribution/server/src/main/assembly/bin-assembly.xml b/modules/distribution/server/src/main/assembly/bin-assembly.xml
index 47cccb6..acf48e5 100644
--- a/modules/distribution/server/src/main/assembly/bin-assembly.xml
+++ b/modules/distribution/server/src/main/assembly/bin-assembly.xml
@@ -197,6 +197,7 @@
                 <include>org.apache.airavata:airavata-credential-store:jar</include>
                 <include>org.apache.airavata:airavata-gfac-core:jar</include>
                 <include>org.apache.airavata:airavata-gfac-ssh:jar</include>
+                <include>org.apache.airavata:airavata-gfac-local:jar</include>
                 <include>org.apache.airavata:airavata-gfac-gsissh:jar</include>
                 <include>org.apache.airavata:airavata-gfac-bes:jar</include>
                 <include>org.apache.airavata:airavata-gfac-gram:jar</include>

http://git-wip-us.apache.org/repos/asf/airavata/blob/cb0db780/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/LocalDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/LocalDirectorySetupHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/LocalDirectorySetupHandler.java
deleted file mode 100644
index 8174a3d..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/LocalDirectorySetupHandler.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.handler;
-
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.util.Map;
-
-public class LocalDirectorySetupHandler implements GFacHandler{
-    private static final Logger log = LoggerFactory.getLogger(LocalDirectorySetupHandler.class);
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        log.info("Invoking GramDirectorySetupHandler ...");
-        HostDescriptionType type = jobExecutionContext.getApplicationContext().getHostDescription().getType();
-        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
-        log.debug("working diectroy = " + app.getStaticWorkingDirectory());
-        log.debug("temp directory = " + app.getScratchWorkingDirectory());
-
-        makeFileSystemDir(app.getStaticWorkingDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getScratchWorkingDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getInputDataDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getOutputDataDirectory(),jobExecutionContext);
-    }
-    private void makeFileSystemDir(String dir, JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-           File f = new File(dir);
-           if (f.isDirectory() && f.exists()) {
-               return;
-           } else if (!new File(dir).mkdir()) {
-               throw new GFacHandlerException("Cannot make directory "+dir);
-           }
-    }
-
-    public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
-
-    }
-}
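
Two details of makeFileSystemDir above are worth flagging: f.isDirectory() already implies f.exists(), and File.mkdir() fails when intermediate directories are missing, so the input/output data directories are only created if their parent happens to exist. A possible hardening (an illustrative alternative, not what this commit ships) would use java.nio.file, assuming Java 7+:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

// Hypothetical alternative to makeFileSystemDir: createDirectories builds
// any missing parent directories and is a no-op when the path exists.
final class DirectorySetupSketch {
    static void makeFileSystemDir(String dir) throws IOException {
        Files.createDirectories(Paths.get(dir));
    }

    public static void main(String[] args) throws IOException {
        makeFileSystemDir(System.getProperty("java.io.tmpdir") + "/gfac-test/input");
    }
}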

http://git-wip-us.apache.org/repos/asf/airavata/blob/cb0db780/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/LocalProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/LocalProvider.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/LocalProvider.java
deleted file mode 100644
index a12bf5d..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/LocalProvider.java
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.provider.impl;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.notification.events.StartExecutionEvent;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.gfac.provider.utils.ProviderUtils;
-import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.gfac.utils.InputStreamToFileWriter;
-import org.apache.airavata.gfac.utils.InputUtils;
-import org.apache.airavata.gfac.utils.OutputUtils;
-import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
-import org.apache.airavata.model.workspace.experiment.JobDetails;
-import org.apache.airavata.model.workspace.experiment.JobState;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.NameValuePairType;
-import org.apache.xmlbeans.XmlException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
-
-public class LocalProvider extends AbstractProvider{
-    private static final Logger log = LoggerFactory.getLogger(LocalProvider.class);
-    private ProcessBuilder builder;
-    private List<String> cmdList;
-    private String jobId;
-    
-    public static class LocalProviderJobData{
-    	private String applicationName;
-    	private List<String> inputParameters;
-    	private String workingDir;
-    	private String inputDir;
-    	private String outputDir;
-		public String getApplicationName() {
-			return applicationName;
-		}
-		public void setApplicationName(String applicationName) {
-			this.applicationName = applicationName;
-		}
-		public List<String> getInputParameters() {
-			return inputParameters;
-		}
-		public void setInputParameters(List<String> inputParameters) {
-			this.inputParameters = inputParameters;
-		}
-		public String getWorkingDir() {
-			return workingDir;
-		}
-		public void setWorkingDir(String workingDir) {
-			this.workingDir = workingDir;
-		}
-		public String getInputDir() {
-			return inputDir;
-		}
-		public void setInputDir(String inputDir) {
-			this.inputDir = inputDir;
-		}
-		public String getOutputDir() {
-			return outputDir;
-		}
-		public void setOutputDir(String outputDir) {
-			this.outputDir = outputDir;
-		}
-    }
-    public LocalProvider(){
-        cmdList = new ArrayList<String>();
-    }
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
-    	super.initialize(jobExecutionContext);
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().
-                getApplicationDeploymentDescription().getType();
-
-        buildCommand(app.getExecutableLocation(), ProviderUtils.getInputParameters(jobExecutionContext));
-        initProcessBuilder(app);
-
-        // extra environment variables
-        builder.environment().put(Constants.INPUT_DATA_DIR_VAR_NAME, app.getInputDataDirectory());
-        builder.environment().put(Constants.OUTPUT_DATA_DIR_VAR_NAME, app.getOutputDataDirectory());
-
-        // set working directory
-        builder.directory(new File(app.getStaticWorkingDirectory()));
-
-        // log info
-        log.info("Command = " + InputUtils.buildCommand(cmdList));
-        log.info("Working dir = " + builder.directory());
-        for (String key : builder.environment().keySet()) {
-            log.info("Env[" + key + "] = " + builder.environment().get(key));
-        }
-    }
-
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-         ApplicationDeploymentDescriptionType app = jobExecutionContext.
-                 getApplicationContext().getApplicationDeploymentDescription().getType();
-        JobDetails jobDetails = new JobDetails();
-        try {
-        	jobId= jobExecutionContext.getTaskData().getTaskID();
-            jobDetails.setJobID(jobId);
-            jobDetails.setJobDescription(app.toString());
-            jobExecutionContext.setJobDetails(jobDetails);
-            JobDescriptor jobDescriptor = GFacUtils.createJobDescriptor(jobExecutionContext, app, null);
-            jobDetails.setJobDescription(jobDescriptor.toXML());
-            GFacUtils.saveJobStatus(jobDetails, JobState.SETUP, jobExecutionContext.getTaskData().getTaskID());
-        	// running cmd
-            Process process = builder.start();
-
-            Thread standardOutWriter = new InputStreamToFileWriter(process.getInputStream(), app.getStandardOutput());
-            Thread standardErrorWriter = new InputStreamToFileWriter(process.getErrorStream(), app.getStandardError());
-
-            // start output threads
-            standardOutWriter.setDaemon(true);
-            standardErrorWriter.setDaemon(true);
-            standardOutWriter.start();
-            standardErrorWriter.start();
-
-            int returnValue = process.waitFor();
-
-            // make sure other two threads are done
-            standardOutWriter.join();
-            standardErrorWriter.join();
-
-            /*
-             * check return value. usually not very helpful to draw conclusions based on return values so don't bother.
-             * just provide warning in the log messages
-             */
-            if (returnValue != 0) {
-                log.error("Process finished with non zero return value. Process may have failed");
-            } else {
-                log.info("Process finished with return value of zero.");
-            }
-
-            StringBuffer buf = new StringBuffer();
-            buf.append("Executed ").append(InputUtils.buildCommand(cmdList))
-                    .append(" on the localHost, working directory = ").append(app.getStaticWorkingDirectory())
-                    .append(" tempDirectory = ").append(app.getScratchWorkingDirectory()).append(" With the status ")
-                    .append(String.valueOf(returnValue));
-            log.info(buf.toString());
-        } catch (IOException io) {
-            throw new GFacProviderException(io.getMessage(), io);
-        } catch (InterruptedException e) {
-            throw new GFacProviderException(e.getMessage(), e);
-        }catch (GFacException e) {
-            throw new GFacProviderException(e.getMessage(), e);
-        }
-    }
-
-//	private void saveApplicationJob(JobExecutionContext jobExecutionContext)
-//			throws GFacProviderException {
-//		ApplicationDeploymentDescriptionType app = jobExecutionContext.
-//                getApplicationContext().getApplicationDeploymentDescription().getType();
-//		ApplicationJob appJob = GFacUtils.createApplicationJob(jobExecutionContext);
-//		appJob.setJobId(jobId);
-//		LocalProviderJobData data = new LocalProviderJobData();
-//		data.setApplicationName(app.getExecutableLocation());
-//		data.setInputDir(app.getInputDataDirectory());
-//		data.setOutputDir(app.getOutputDataDirectory());
-//		data.setWorkingDir(builder.directory().toString());
-//		data.setInputParameters(ProviderUtils.getInputParameters(jobExecutionContext));
-//		ByteArrayOutputStream stream = new ByteArrayOutputStream();
-//		JAXB.marshal(data, stream);
-//		appJob.setJobData(stream.toString());
-//		appJob.setSubmittedTime(Calendar.getInstance().getTime());
-//		appJob.setStatus(ApplicationJobStatus.SUBMITTED);
-//		appJob.setStatusUpdateTime(appJob.getSubmittedTime());
-//		GFacUtils.recordApplicationJob(jobExecutionContext, appJob);
-//	}
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-
-        try {
-            String stdOutStr = GFacUtils.readFileToString(app.getStandardOutput());
-            String stdErrStr = GFacUtils.readFileToString(app.getStandardError());
-			Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
-            OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr);
-        } catch (XmlException e) {
-            throw new GFacProviderException("Cannot read output:" + e.getMessage(), e);
-        } catch (IOException io) {
-            throw new GFacProviderException(io.getMessage(), io);
-        } catch (Exception e){
-        	throw new GFacProviderException("Error in retrieving results",e);
-        }
-    }
-
-    public void cancelJob(String jobId, JobExecutionContext jobExecutionContext) throws GFacException {
-        throw new NotImplementedException();
-    }
-
-
-    private void buildCommand(String executable, List<String> inputParameterList){
-        cmdList.add(executable);
-        cmdList.addAll(inputParameterList);
-    }
-
-    private void initProcessBuilder(ApplicationDeploymentDescriptionType app){
-        builder = new ProcessBuilder(cmdList);
-
-        NameValuePairType[] env = app.getApplicationEnvironmentArray();
-
-        if(env != null && env.length > 0){
-            Map<String,String> builderEnv = builder.environment();
-            for (NameValuePairType entry : env) {
-                builderEnv.put(entry.getName(), entry.getValue());
-            }
-        }
-    }
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-
-    }
-}
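
The control flow of LocalProvider.execute above is the standard ProcessBuilder recipe: assemble the command list, set environment variables and the working directory, start the process, drain stdout/stderr concurrently (the InputStreamToFileWriter threads in the next file), and inspect waitFor(). A stripped-down standalone sketch, with Java 7 redirects standing in for the writer threads and all paths illustrative:

import java.io.File;
import java.io.IOException;

// Standalone sketch of the LocalProvider execution pattern; the environment
// variable name and file names are placeholders, not values from this commit.
public class LocalExecSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        File tmp = new File(System.getProperty("java.io.tmpdir"));
        ProcessBuilder builder = new ProcessBuilder("/bin/echo", "hello");
        builder.directory(tmp);                                    // working directory
        builder.environment().put("INPUT_DATA_DIR", new File(tmp, "input").getPath());
        builder.redirectOutput(new File(tmp, "echo.stdout"));      // instead of writer threads
        builder.redirectError(new File(tmp, "echo.stderr"));

        int returnValue = builder.start().waitFor();
        if (returnValue != 0) {
            System.err.println("Process finished with non-zero return value: " + returnValue);
        }
    }
}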

http://git-wip-us.apache.org/repos/asf/airavata/blob/cb0db780/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/InputStreamToFileWriter.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/InputStreamToFileWriter.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/InputStreamToFileWriter.java
deleted file mode 100644
index 691e11d..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/InputStreamToFileWriter.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.utils;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.*;
-
-public class InputStreamToFileWriter extends Thread{
-    protected final Logger log = LoggerFactory.getLogger(this.getClass());
-
-    private BufferedReader in;
-    private BufferedWriter out;
-
-    public InputStreamToFileWriter(InputStream in, String out) throws IOException {
-        this.in = new BufferedReader(new InputStreamReader(in));
-        this.out = new BufferedWriter(new FileWriter(out));
-    }
-
-    public void run() {
-        try {
-            String line = null;
-            while ((line = in.readLine()) != null) {
-                if (log.isDebugEnabled()) {
-                    log.debug(line);
-                }
-                out.write(line);
-                out.newLine();
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            if (in != null) {
-                try {
-                    in.close();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-            if (out != null) {
-                try {
-                    out.close();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/cb0db780/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/InputUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/InputUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/InputUtils.java
deleted file mode 100644
index dedfa29..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/InputUtils.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.utils;
-
-import java.util.List;
-
-public class InputUtils {
-    private static final String SPACE = " ";
-
-    private InputUtils() {
-    }
-
-    public static String buildCommand(List<String> cmdList) {
-        StringBuffer buff = new StringBuffer();
-        for (String string : cmdList) {
-            buff.append(string);
-            buff.append(SPACE);
-        }
-        return buff.toString();
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/cb0db780/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/LocalProviderUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/LocalProviderUtil.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/LocalProviderUtil.java
deleted file mode 100644
index c80eeda..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/LocalProviderUtil.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.utils;
-
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-
-public class LocalProviderUtil {
-    private static final Logger log = LoggerFactory.getLogger(LocalProviderUtil.class);
-
-    private void makeFileSystemDir(String dir) throws GFacProviderException {
-        File f = new File(dir);
-        if (f.isDirectory() && f.exists()) {
-            return;
-        } else if (!new File(dir).mkdir()) {
-            throw new GFacProviderException("Cannot make directory " + dir);
-        }
-    }
-
-    public void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.
-                getApplicationContext().getApplicationDeploymentDescription().getType();
-        log.info("working diectroy = " + app.getStaticWorkingDirectory());
-        log.info("temp directory = " + app.getScratchWorkingDirectory());
-        makeFileSystemDir(app.getStaticWorkingDirectory());
-        makeFileSystemDir(app.getScratchWorkingDirectory());
-        makeFileSystemDir(app.getInputDataDirectory());
-        makeFileSystemDir(app.getOutputDataDirectory());
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/cb0db780/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
deleted file mode 100644
index c33a8bc..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import org.apache.airavata.commons.gfac.type.*;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.ApplicationContext;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.cpi.GFacImpl;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.lang.SystemUtils;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-public class LocalProviderTest {
-    private JobExecutionContext jobExecutionContext;
-    @Before
-    public void setUp() throws Exception {
-
-        URL resource = this.getClass().getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        File configFile = new File(resource.getPath());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(configFile, null, null);
-        // have to set InFlow handlers and OutFlow handlers
-        ApplicationContext applicationContext = new ApplicationContext();
-        HostDescription host = new HostDescription();
-        host.getType().setHostName("localhost");
-        host.getType().setHostAddress("localhost");
-        applicationContext.setHostDescription(host);
-        /*
-           * App
-           */
-        ApplicationDescription appDesc = new ApplicationDescription();
-        ApplicationDeploymentDescriptionType app = appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-
-        /*
-           * Use bat file if it is compiled on Windows
-           */
-        if (SystemUtils.IS_OS_WINDOWS) {
-            URL url = this.getClass().getClassLoader().getResource("echo.bat");
-            app.setExecutableLocation(url.getFile());
-        } else {
-            //for unix and Mac
-            app.setExecutableLocation("/bin/echo");
-        }
-
-        /*
-           * Default tmp location
-           */
-        String tempDir = System.getProperty("java.io.tmpdir");
-        if (tempDir == null) {
-            tempDir = "/tmp";
-        }
-
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "input");
-        app.setOutputDataDirectory(tempDir + File.separator + "output");
-        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
-        app.setStandardError(tempDir + File.separator + "echo.stderr");
-
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-
-        /*
-           * Service
-           */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-                .size()]);
-
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
-        jobExecutionContext.setApplicationContext(applicationContext);
-        /*
-        * Host
-        */
-        applicationContext.setServiceDescription(serv);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-//		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-    @Test
-    public void testLocalProvider() throws GFacException {
-        GFacImpl gFacAPI = new GFacImpl();
-        gFacAPI.submitJob(jobExecutionContext);
-        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-        Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/cb0db780/modules/gfac/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/pom.xml b/modules/gfac/pom.xml
index 774060c..d2ee2a5 100644
--- a/modules/gfac/pom.xml
+++ b/modules/gfac/pom.xml
@@ -33,6 +33,7 @@
                 <module>gfac-core</module>
                 <module>gfac-ec2</module>
                 <module>gfac-ssh</module>
+                <module>gfac-local</module>
 		<module>gfac-gram</module>
                 <module>gfac-gsissh</module>
 		<module>gfac-bes</module>


[7/7] git commit: separate gfac-hadoop from gfac-core

Posted by la...@apache.org.
separate gfac-hadoop from gfac-core


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/9a787309
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/9a787309
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/9a787309

Branch: refs/heads/temp
Commit: 9a7873094ec2238f2e01fe0a001a556b749771fe
Parents: 1852d79
Author: Nipun Udara <y....@gmail.com>
Authored: Sun Apr 27 08:39:16 2014 +0530
Committer: Nipun Udara <y....@gmail.com>
Committed: Sun Apr 27 08:39:16 2014 +0530

----------------------------------------------------------------------
 modules/distribution/server/pom.xml             |   5 +
 .../server/src/main/assembly/bin-assembly.xml   |   1 +
 modules/gfac/gfac-core/pom.xml                  |  44 ---
 .../airavata/gfac/AbstractSecurityContext.java  |   1 -
 .../gfac/handler/HDFSDataMovementHandler.java   | 105 -------
 .../gfac/handler/HadoopDeploymentHandler.java   | 274 -------------------
 .../gfac/provider/impl/HadoopProvider.java      | 153 -----------
 .../gfac/provider/utils/HadoopUtils.java        |  60 ----
 modules/gfac/gfac-gsissh/pom.xml                |   6 +-
 modules/gfac/gfac-hadoop/pom.xml                | 117 ++++++++
 .../gfac/handler/HDFSDataMovementHandler.java   | 102 +++++++
 .../gfac/handler/HadoopDeploymentHandler.java   | 274 +++++++++++++++++++
 .../gfac/provider/impl/HadoopProvider.java      | 153 +++++++++++
 .../gfac/provider/utils/HadoopUtils.java        |  60 ++++
 .../src/main/resources/errors.properties        | 197 +++++++++++++
 .../src/main/resources/service.properties       |  58 ++++
 .../src/test/resources/PBSTemplate.xslt         |  73 +++++
 .../src/test/resources/gfac-config.xml          |  90 ++++++
 .../src/test/resources/logging.properties       |  42 +++
 modules/gfac/gfac-ssh/pom.xml                   |   5 +
 modules/gfac/pom.xml                            |   3 +-
 21 files changed, 1184 insertions(+), 639 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/distribution/server/pom.xml
----------------------------------------------------------------------
diff --git a/modules/distribution/server/pom.xml b/modules/distribution/server/pom.xml
index fed3646..202db63 100644
--- a/modules/distribution/server/pom.xml
+++ b/modules/distribution/server/pom.xml
@@ -311,6 +311,11 @@
         </dependency>
         <dependency>
             <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-gfac-hadoop</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
             <artifactId>airavata-gfac-core</artifactId>
             <version>${project.version}</version>
         </dependency>

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/distribution/server/src/main/assembly/bin-assembly.xml
----------------------------------------------------------------------
diff --git a/modules/distribution/server/src/main/assembly/bin-assembly.xml b/modules/distribution/server/src/main/assembly/bin-assembly.xml
index acf48e5..b5faaa5 100644
--- a/modules/distribution/server/src/main/assembly/bin-assembly.xml
+++ b/modules/distribution/server/src/main/assembly/bin-assembly.xml
@@ -199,6 +199,7 @@
                 <include>org.apache.airavata:airavata-gfac-ssh:jar</include>
                 <include>org.apache.airavata:airavata-gfac-local:jar</include>
                 <include>org.apache.airavata:airavata-gfac-gsissh:jar</include>
+                <include>org.apache.airavata:airavata-gfac-hadoop:jar</include>
                 <include>org.apache.airavata:airavata-gfac-bes:jar</include>
                 <include>org.apache.airavata:airavata-gfac-gram:jar</include>
                 <include>org.apache.airavata:airavata-client-api:jar</include>

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-core/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/pom.xml b/modules/gfac/gfac-core/pom.xml
index 4762c3e..db8d3f5 100644
--- a/modules/gfac/gfac-core/pom.xml
+++ b/modules/gfac/gfac-core/pom.xml
@@ -103,56 +103,12 @@
             <scope>test</scope>
         </dependency>
 
-
-
-        <!-- Hadoop provider related dependencies -->
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
-            <version>1.0.3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <version>1.0.3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.whirr</groupId>
-            <artifactId>whirr-core</artifactId>
-            <version>0.7.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.whirr</groupId>
-            <artifactId>whirr-hadoop</artifactId>
-            <version>0.7.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.hamcrest</groupId>
-            <artifactId>hamcrest-all</artifactId>
-            <version>1.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-all</artifactId>
-            <version>1.8.5</version>
-        </dependency>
-        <dependency>
-            <groupId>commons-configuration</groupId>
-            <artifactId>commons-configuration</artifactId>
-            <version>1.7</version>
-        </dependency>
-        <dependency>
-            <groupId>net.sf.jopt-simple</groupId>
-            <artifactId>jopt-simple</artifactId>
-            <version>3.2</version>
-        </dependency>
         <!-- Guava -->
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
             <version>12.0</version>
         </dependency>
-
         <!-- gsi-ssh api dependencies -->
         <dependency>
             <groupId>org.apache.airavata</groupId>

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/AbstractSecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/AbstractSecurityContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/AbstractSecurityContext.java
index 2605511..33b7846 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/AbstractSecurityContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/AbstractSecurityContext.java
@@ -29,7 +29,6 @@ package org.apache.airavata.gfac;
 
 import org.apache.airavata.credential.store.store.CredentialReader;
 import org.apache.airavata.gfac.SecurityContext;
-import org.apache.commons.math.stat.clustering.Cluster;
 
 /**
  * Abstract implementation of the security context.

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/HDFSDataMovementHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/HDFSDataMovementHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/HDFSDataMovementHandler.java
deleted file mode 100644
index f6ac573..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/HDFSDataMovementHandler.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.handler;
-
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.handler.GFacHandler;
-import org.apache.airavata.gfac.handler.GFacHandlerException;
-import org.apache.airavata.gfac.provider.utils.HadoopUtils;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.Map;
-
-public class HDFSDataMovementHandler implements GFacHandler {
-    private static final Logger logger = LoggerFactory.getLogger(HDFSDataMovementHandler.class);
-
-    private boolean isWhirrBasedDeployment = false;
-    private File hadoopConfigDir;
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE").equals("WHIRR")){
-            isWhirrBasedDeployment = true;
-        } else {
-            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
-            File hadoopConfigDir = new File(hadoopConfigDirPath);
-            if (!hadoopConfigDir.exists()){
-                throw new GFacHandlerException("Specified hadoop configuration directory doesn't exist.");
-            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).size() <= 0){
-                throw new GFacHandlerException("Cannot find any hadoop configuration files inside specified directory.");
-            }
-
-            this.hadoopConfigDir = hadoopConfigDir;
-        }
-
-        if(jobExecutionContext.isInPath()){
-            try {
-                handleInPath(jobExecutionContext);
-            } catch (IOException e) {
-                throw new GFacHandlerException("Error while copying input data from local file system to HDFS.",e);
-            }
-        } else {
-            handleOutPath(jobExecutionContext);
-        }
-    }
-
-    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException, IOException {
-        ApplicationDeploymentDescriptionType appDepDesc =
-                jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
-                (HadoopApplicationDeploymentDescriptionType)appDepDesc;
-        if(appDepDesc.isSetInputDataDirectory() && isInputDataDirectoryLocal(appDepDesc)){
-            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
-            FileSystem hdfs = FileSystem.get(hadoopConf);
-            hdfs.copyFromLocalFile(new Path(appDepDesc.getInputDataDirectory()),
-                    new Path(hadoopAppDesc.getHadoopJobConfiguration().getHdfsInputDirectory()));
-        }
-    }
-
-    private boolean isInputDataDirectoryLocal(ApplicationDeploymentDescriptionType appDepDesc){
-        String inputDataDirectoryPath = appDepDesc.getInputDataDirectory();
-        File inputDataDirectory = new File(inputDataDirectoryPath);
-        if(inputDataDirectory.exists() && FileUtils.listFiles(inputDataDirectory, null, null).size() > 0){
-            return true;
-        }
-
-        return false;
-    }
-
-    private void handleOutPath(JobExecutionContext jobExecutionContext){}
-
-    public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
-
-    }
-}
\ No newline at end of file
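
The in-path half of this handler reduces to a single FileSystem call once a Hadoop Configuration is assembled. A minimal standalone sketch against the Hadoop 1.x API used by this module; the namenode URI and both paths are placeholders for values the handler derives from the deployment description and the Whirr or manual configuration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch of the handleInPath copy step. Assumes a reachable HDFS namenode;
// every name below is a placeholder, not a value from this commit.
public class HdfsStageInSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://namenode:9000"); // Hadoop 1.x default-FS key
        FileSystem hdfs = FileSystem.get(conf);
        hdfs.copyFromLocalFile(new Path("/tmp/input"), new Path("/user/airavata/input"));
        hdfs.close();
    }
}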

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/HadoopDeploymentHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/HadoopDeploymentHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/HadoopDeploymentHandler.java
deleted file mode 100644
index f72afd1..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/HadoopDeploymentHandler.java
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.handler;
-
-import com.google.common.io.Files;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.schemas.gfac.HadoopHostType;
-import org.apache.commons.configuration.CompositeConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.io.FileUtils;
-import org.apache.whirr.Cluster;
-import org.apache.whirr.ClusterController;
-import org.apache.whirr.ClusterControllerFactory;
-import org.apache.whirr.ClusterSpec;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.*;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.EnumSet;
-import java.util.Map;
-import java.util.Properties;
-
-import static org.apache.whirr.ClusterSpec.Property.*;
-import static org.apache.whirr.ClusterSpec.Property.INSTANCE_TEMPLATES;
-import static org.apache.whirr.ClusterSpec.Property.PRIVATE_KEY_FILE;
-
-/**
- * This handler takes care of deploying hadoop in cloud(in cloud bursting scenarios) and
- * deploying hadoop in local cluster. In case of existing hadoop cluster this will ignore
- * cluster setup just use the hadoop configuration provided by user.
- */
-public class HadoopDeploymentHandler implements GFacHandler {
-    private static final Logger logger = LoggerFactory.getLogger("hadoop-dep-handler");
-
-    /**
-     * Once invoked this method will deploy Hadoop in a local cluster or cloud based on the
-     * configuration provided. If there is a already deployed hadoop cluster this will skip
-     * deployment.
-     *
-     * @param jobExecutionContext job execution context containing all the required configurations
-     *                            and runtime information.
-     * @throws GFacHandlerException
-     */
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        if(jobExecutionContext.isInPath()){
-            handleInPath(jobExecutionContext);
-        } else {
-            handleOutPath(jobExecutionContext);
-        }
-    }
-
-    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        HostDescription hostDescription =
-                jobExecutionContext.getApplicationContext().getHostDescription();
-        if (!isHadoopDeploymentAvailable(hostDescription)) {
-            // Temp directory to keep generated configuration files.
-            File tempDirectory = Files.createTempDir();
-            try {
-                File hadoopSiteXML = launchHadoopCluster(hostDescription, tempDirectory);
-                jobExecutionContext.getInMessageContext().addParameter("HADOOP_SITE_XML", hadoopSiteXML.getAbsolutePath());
-                jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE", "WHIRR");
-                // TODO: Add hadoop-site.xml to job execution context.
-            } catch (IOException e) {
-                throw new GFacHandlerException("IO Error while processing configurations.",e);
-            } catch (ConfigurationException e) {
-                throw  new GFacHandlerException("Whirr configuration error.", e);
-            } catch (InterruptedException e) {
-                throw new GFacHandlerException("Hadoop cluster launch interrupted.", e);
-            } catch (TransformerException e) {
-                throw new GFacHandlerException("Error while creating hadoop-site.xml", e);
-            } catch (ParserConfigurationException e) {
-                throw new GFacHandlerException("Error while creating hadoop-site.xml", e);
-            }
-        } else {
-            jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE",
-                    "MANUAL");
-            jobExecutionContext.getInMessageContext().addParameter("HADOOP_CONFIG_DIR",
-                    ((HadoopHostType)hostDescription.getType()).getHadoopConfigurationDirectory());
-            logger.info("Hadoop configuration is available. Skipping hadoop deployment.");
-            if(logger.isDebugEnabled()){
-                logger.debug("Hadoop configuration directory: " +
-                        getHadoopConfigDirectory(hostDescription));
-            }
-        }
-    }
-
-    private void handleOutPath(JobExecutionContext jobExecutionContext){
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(((String)inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE")).equals("WHIRR")){
-            // TODO: Shutdown hadoop cluster.
-            logger.info("Shutdown hadoop cluster.");
-        }
-    }
-
-    private File launchHadoopCluster(HostDescription hostDescription, File workingDirectory)
-            throws IOException, GFacHandlerException, ConfigurationException, InterruptedException, TransformerException, ParserConfigurationException {
-        ClusterSpec hadoopClusterSpec =
-                whirrConfigurationToClusterSpec(hostDescription, workingDirectory);
-        ClusterController hadoopClusterController =
-                createClusterController(hadoopClusterSpec.getServiceName());
-        Cluster hadoopCluster =  hadoopClusterController.launchCluster(hadoopClusterSpec);
-
-        logger.info(String.format("Started cluster of %d instances.",
-                hadoopCluster.getInstances().size()));
-
-        File siteXML = new File(workingDirectory, "hadoop-site.xml");
-        clusterPropertiesToHadoopSiteXml(hadoopCluster.getConfiguration(), siteXML);
-
-        return siteXML;
-    }
-
-    private ClusterController createClusterController(String serviceName){
-        ClusterControllerFactory factory = new ClusterControllerFactory();
-        ClusterController controller = factory.create(serviceName);
-
-        if(controller == null){
-            logger.warn("Unable to find the service {}, using default.", serviceName);
-            controller = factory.create(null);
-        }
-
-        return controller;
-    }
-
-    private ClusterSpec whirrConfigurationToClusterSpec(HostDescription hostDescription,
-                                                        File workingDirectory) throws IOException, GFacHandlerException, ConfigurationException {
-        File whirrConfig = getWhirrConfigurationFile(hostDescription, workingDirectory);
-        CompositeConfiguration compositeConfiguration = new CompositeConfiguration();
-        Configuration configuration = new PropertiesConfiguration(whirrConfig);
-        compositeConfiguration.addConfiguration(configuration);
-
-        ClusterSpec hadoopClusterSpec = new ClusterSpec(compositeConfiguration);
-
-        for (ClusterSpec.Property required : EnumSet.of(CLUSTER_NAME, PROVIDER, IDENTITY, CREDENTIAL,
-                INSTANCE_TEMPLATES, PRIVATE_KEY_FILE)) {
-            if (hadoopClusterSpec.getConfiguration().getString(required.getConfigName()) == null) {
-                throw new IllegalArgumentException(String.format("Option '%s' not set.",
-                        required.getSimpleName()));
-            }
-        }
-
-        return hadoopClusterSpec;
-    }
-
-    private File getWhirrConfigurationFile(HostDescription hostDescription, File workingDirectory)
-            throws GFacHandlerException, IOException {
-        HadoopHostType hadoopHostDesc = (HadoopHostType)hostDescription;
-        if(hadoopHostDesc.isSetWhirrConfiguration()){
-            HadoopHostType.WhirrConfiguration whirrConfig = hadoopHostDesc.getWhirrConfiguration();
-            if(whirrConfig.isSetConfigurationFile()){
-                File whirrConfigFile = new File(whirrConfig.getConfigurationFile());
-                if(!whirrConfigFile.exists()){
-                    throw new GFacHandlerException(
-                            "Specified whirr configuration file doesn't exist.");
-                }
-
-                FileUtils.copyFileToDirectory(whirrConfigFile, workingDirectory);
-
-                return new File(workingDirectory, whirrConfigFile.getName());
-            } else if(whirrConfig.isSetConfiguration()){
-                Properties whirrConfigProps =
-                        whirrConfigurationsToProperties(whirrConfig.getConfiguration());
-                File whirrConfigFile = new File(workingDirectory, "whirr-hadoop.config");
-                whirrConfigProps.store(
-                        new FileOutputStream(whirrConfigFile), null);
-
-                return whirrConfigFile;
-            }
-        }
-
-        throw new GFacHandlerException("Cannot find Whirr configurations. A Whirr configuration "
-                + "is required if you don't have an already running Hadoop deployment.");
-    }
-
-    private Properties whirrConfigurationsToProperties(
-            HadoopHostType.WhirrConfiguration.Configuration configuration){
-        Properties whirrConfigProps = new Properties();
-
-        for(HadoopHostType.WhirrConfiguration.Configuration.Property property:
-                configuration.getPropertyArray()) {
-            whirrConfigProps.put(property.getName(), property.getValue());
-        }
-
-        return whirrConfigProps;
-    }
-
-    private void clusterPropertiesToHadoopSiteXml(Properties props, File hadoopSiteXml) throws ParserConfigurationException, TransformerException {
-        DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
-        DocumentBuilder documentBuilder = domFactory.newDocumentBuilder();
-
-        Document hadoopSiteXmlDoc = documentBuilder.newDocument();
-
-        hadoopSiteXmlDoc.setXmlVersion("1.0");
-        hadoopSiteXmlDoc.setXmlStandalone(true);
-        hadoopSiteXmlDoc.createProcessingInstruction("xml-stylesheet", "type=\"text/xsl\" href=\"configuration.xsl\"");
-
-        Element configEle = hadoopSiteXmlDoc.createElement("configuration");
-
-        hadoopSiteXmlDoc.appendChild(configEle);
-
-        for(Map.Entry<Object, Object> entry : props.entrySet()){
-            addPropertyToConfiguration(entry, configEle, hadoopSiteXmlDoc);
-        }
-
-        saveDomToFile(hadoopSiteXmlDoc, hadoopSiteXml);
-    }
-
-    private void saveDomToFile(Document dom, File destFile) throws TransformerException {
-        Source source = new DOMSource(dom);
-
-        Result result = new StreamResult(destFile);
-
-        Transformer transformer = TransformerFactory.newInstance().newTransformer();
-        transformer.transform(source, result);
-    }
-
-    private void addPropertyToConfiguration(Map.Entry<Object, Object> entry, Element configElement, Document doc){
-        Element property = doc.createElement("property");
-        configElement.appendChild(property);
-
-        Element nameEle = doc.createElement("name");
-        nameEle.setTextContent(entry.getKey().toString());
-        property.appendChild(nameEle);
-
-        Element valueEle = doc.createElement("value");
-        valueEle.setTextContent(entry.getValue().toString());
-        property.appendChild(valueEle);
-    }
-
-    private boolean isHadoopDeploymentAvailable(HostDescription hostDescription) {
-        return ((HadoopHostType) hostDescription.getType()).isSetHadoopConfigurationDirectory();
-    }
-
-    private String getHadoopConfigDirectory(HostDescription hostDescription){
-        return ((HadoopHostType)hostDescription.getType()).getHadoopConfigurationDirectory();
-    }
-
-    public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
-
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/HadoopProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/HadoopProvider.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/HadoopProvider.java
deleted file mode 100644
index c20e2ea..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/HadoopProvider.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.provider.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.ArrayList;
-import java.util.Map;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.gfac.provider.utils.HadoopUtils;
-import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-
-/**
- * Executes a Hadoop job using the cluster configuration provided by handlers
- * in the in-flow.
- */
-public class HadoopProvider extends AbstractProvider{
-    private static final Logger logger = LoggerFactory.getLogger(HadoopProvider.class);
-
-    private boolean isWhirrBasedDeployment = false;
-    private File hadoopConfigDir;
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE").equals("WHIRR")){
-            isWhirrBasedDeployment = true;
-        } else {
-            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
-            File hadoopConfigDir = new File(hadoopConfigDirPath);
-            if (!hadoopConfigDir.exists()){
-                throw new GFacProviderException("Specified hadoop configuration directory doesn't exist.");
-            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).size() <= 0){
-                throw new GFacProviderException("Cannot find any hadoop configuration files inside specified directory.");
-            }
-
-            this.hadoopConfigDir = hadoopConfigDir;
-        }
-    }
-
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
-                (HadoopApplicationDeploymentDescriptionType)jobExecutionContext
-                        .getApplicationContext().getApplicationDeploymentDescription().getType();
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        HadoopApplicationDeploymentDescriptionType.HadoopJobConfiguration jobConf = hadoopAppDesc.getHadoopJobConfiguration();
-
-        try{
-            // Preparing Hadoop configuration
-            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(
-                    jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
-
-            // Load jar containing map-reduce job implementation
-            ArrayList<URL> mapRedJars = new ArrayList<URL>();
-            mapRedJars.add(new File(jobConf.getJarLocation()).toURI().toURL());
-            URLClassLoader childClassLoader = new URLClassLoader(mapRedJars.toArray(new URL[mapRedJars.size()]),
-                    this.getClass().getClassLoader());
-
-            Job job = new Job(hadoopConf);
-
-            job.setJobName(jobConf.getJobName());
-
-            job.setOutputKeyClass(Class.forName(jobConf.getOutputKeyClass(), true, childClassLoader));
-            job.setOutputValueClass(Class.forName(jobConf.getOutputValueClass(), true, childClassLoader));
-
-            job.setMapperClass((Class<? extends Mapper>)Class.forName(jobConf.getMapperClass(), true, childClassLoader));
-            job.setCombinerClass((Class<? extends Reducer>) Class.forName(jobConf.getCombinerClass(), true, childClassLoader));
-            job.setReducerClass((Class<? extends Reducer>) Class.forName(jobConf.getReducerClass(), true, childClassLoader));
-
-            job.setInputFormatClass((Class<? extends InputFormat>)Class.forName(jobConf.getInputFormatClass(), true, childClassLoader));
-            job.setOutputFormatClass((Class<? extends OutputFormat>) Class.forName(jobConf.getOutputFormatClass(), true, childClassLoader));
-
-            FileInputFormat.setInputPaths(job, new Path(hadoopAppDesc.getInputDataDirectory()));
-            FileOutputFormat.setOutputPath(job, new Path(hadoopAppDesc.getOutputDataDirectory()));
-
-            job.waitForCompletion(true);
-            System.out.println(job.getTrackingURL());
-            if(jobExecutionContext.getOutMessageContext() == null){
-                jobExecutionContext.setOutMessageContext(new MessageContext());
-            }
-
-            OutputParameterType[] outputParametersArray = jobExecutionContext.getApplicationContext().
-                    getServiceDescription().getType().getOutputParametersArray();
-            for(OutputParameterType outparamType : outputParametersArray){
-                String paramName = outparamType.getParameterName();
-                if(paramName.equals("test-hadoop")){
-                    ActualParameter outParam = new ActualParameter();
-                    outParam.getType().changeType(StringParameterType.type);
-                    ((StringParameterType) outParam.getType()).setValue(job.getTrackingURL());
-                    jobExecutionContext.getOutMessageContext().addParameter("test-hadoop", outParam);
-                }
-            }
-        } catch (Exception e) {
-            String errMessage = "Error occurred during Map-Reduce job execution.";
-            logger.error(errMessage, e);
-            throw new GFacProviderException(errMessage, e);
-        }
-    }
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        // TODO: How to handle cluster shutdown. Best way is to introduce inPath/outPath to handler.
-    }
-
-    @Override
-    public void cancelJob(String jobId, JobExecutionContext jobExecutionContext) throws GFacException {
-        throw new UnsupportedOperationException("Job cancellation is not implemented for the Hadoop provider.");
-    }
-
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/HadoopUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/HadoopUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/HadoopUtils.java
deleted file mode 100644
index c3053d1..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/HadoopUtils.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.provider.utils;
-
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.util.Collection;
-
-public class HadoopUtils {
-    public static Configuration createHadoopConfiguration(
-            JobExecutionContext jobExecutionContext,
-            boolean isWhirrBasedDeployment,
-            File hadoopConfigDir) throws FileNotFoundException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        Configuration hadoopConf = new Configuration();
-
-        if(isWhirrBasedDeployment){
-            hadoopConf.addResource(new FileInputStream(
-                    new File((String)inMessageContext.getParameter("HADOOP_SITE_XML"))));
-        } else {
-            readHadoopClusterConfigurationFromDirectory(hadoopConfigDir, hadoopConf);
-        }
-
-        return hadoopConf;
-    }
-
-    private static void readHadoopClusterConfigurationFromDirectory(File localHadoopConfigurationDirectory, Configuration hadoopConf)
-            throws FileNotFoundException {
-        Collection hadoopConfigurationFiles =
-                FileUtils.listFiles(localHadoopConfigurationDirectory, null, false);
-        for (Object f : hadoopConfigurationFiles) {
-            hadoopConf.addResource(new FileInputStream((File)f));
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-gsissh/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/pom.xml b/modules/gfac/gfac-gsissh/pom.xml
index 7360743..b05bddf 100644
--- a/modules/gfac/gfac-gsissh/pom.xml
+++ b/modules/gfac/gfac-gsissh/pom.xml
@@ -103,6 +103,10 @@
             <artifactId>xmlbeans</artifactId>
             <version>${xmlbeans.version}</version>
         </dependency>
-
+        <dependency>
+            <groupId>net.schmizz</groupId>
+            <artifactId>sshj</artifactId>
+            <version>0.6.1</version>
+        </dependency>
     </dependencies>
 </project>
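
The sshj dependency added above gives the GSISSH module a pure-Java SSH-2 client. As a rough sketch of what the library is used for (the host, user, and command below are illustrative placeholders, not values from this commit), running a remote command with sshj 0.6.x looks roughly like this:

    import net.schmizz.sshj.SSHClient;
    import net.schmizz.sshj.common.IOUtils;
    import net.schmizz.sshj.connection.channel.direct.Session;

    public class SshjSketch {
        public static void main(String[] args) throws Exception {
            SSHClient ssh = new SSHClient();
            ssh.loadKnownHosts();                   // verify host keys against ~/.ssh/known_hosts
            ssh.connect("gateway.example.org");     // placeholder host
            try {
                ssh.authPublickey("airavata");      // placeholder user; keys from default locations
                Session session = ssh.startSession();
                try {
                    Session.Command cmd = session.exec("/bin/hostname");
                    System.out.print(IOUtils.readFully(cmd.getInputStream()).toString());
                    cmd.join();                     // wait for the remote command to finish
                } finally {
                    session.close();
                }
            } finally {
                ssh.disconnect();
            }
        }
    }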

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/pom.xml b/modules/gfac/gfac-hadoop/pom.xml
new file mode 100644
index 0000000..af1d0a1
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/pom.xml
@@ -0,0 +1,117 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file 
+    distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under 
+    the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
+    obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to 
+    in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
+    ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
+    the License. -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.airavata</groupId>
+        <artifactId>gfac</artifactId>
+        <version>0.12-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>airavata-gfac-hadoop</artifactId>
+    <name>Airavata GFac Hadoop implementation</name>
+    <description>This is the extension of GFac to run Hadoop jobs.</description>
+    <url>http://airavata.apache.org/</url>
+
+    <dependencies>
+        <!-- Logging -->
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+
+        <!-- GFAC schemas -->
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-gfac-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <!-- Test -->
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>6.1.1</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-server-configuration</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-client-configuration</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+
+
+        <!-- Hadoop provider related dependencies -->
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-core</artifactId>
+            <version>1.0.3</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-client</artifactId>
+            <version>1.0.3</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.whirr</groupId>
+            <artifactId>whirr-core</artifactId>
+            <version>0.7.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.whirr</groupId>
+            <artifactId>whirr-hadoop</artifactId>
+            <version>0.7.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+            <version>1.8.5</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-configuration</groupId>
+            <artifactId>commons-configuration</artifactId>
+            <version>1.7</version>
+        </dependency>
+        <dependency>
+            <groupId>net.sf.jopt-simple</groupId>
+            <artifactId>jopt-simple</artifactId>
+            <version>3.2</version>
+        </dependency>
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/handler/HDFSDataMovementHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/handler/HDFSDataMovementHandler.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/handler/HDFSDataMovementHandler.java
new file mode 100644
index 0000000..aafcdec
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/handler/HDFSDataMovementHandler.java
@@ -0,0 +1,102 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+
+package org.apache.airavata.gfac.handler;
+
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.gfac.provider.utils.HadoopUtils;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+
+public class HDFSDataMovementHandler implements GFacHandler {
+    private static final Logger logger = LoggerFactory.getLogger(HDFSDataMovementHandler.class);
+
+    private boolean isWhirrBasedDeployment = false;
+    private File hadoopConfigDir;
+
+    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
+        if(inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE").equals("WHIRR")){
+            isWhirrBasedDeployment = true;
+        } else {
+            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
+            File hadoopConfigDir = new File(hadoopConfigDirPath);
+            if (!hadoopConfigDir.exists()){
+                throw new GFacHandlerException("Specified hadoop configuration directory doesn't exist.");
+            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).size() <= 0){
+                throw new GFacHandlerException("Cannot find any hadoop configuration files inside specified directory.");
+            }
+
+            this.hadoopConfigDir = hadoopConfigDir;
+        }
+
+        if(jobExecutionContext.isInPath()){
+            try {
+                handleInPath(jobExecutionContext);
+            } catch (IOException e) {
+                throw new GFacHandlerException("Error while copying input data from local file system to HDFS.",e);
+            }
+        } else {
+            handleOutPath(jobExecutionContext);
+        }
+    }
+
+    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException, IOException {
+        ApplicationDeploymentDescriptionType appDepDesc =
+                jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
+        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
+                (HadoopApplicationDeploymentDescriptionType)appDepDesc;
+        if(appDepDesc.isSetInputDataDirectory() && isInputDataDirectoryLocal(appDepDesc)){
+            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
+            FileSystem hdfs = FileSystem.get(hadoopConf);
+            hdfs.copyFromLocalFile(new Path(appDepDesc.getInputDataDirectory()),
+                    new Path(hadoopAppDesc.getHadoopJobConfiguration().getHdfsInputDirectory()));
+        }
+    }
+
+    private boolean isInputDataDirectoryLocal(ApplicationDeploymentDescriptionType appDepDesc){
+        String inputDataDirectoryPath = appDepDesc.getInputDataDirectory();
+        File inputDataDirectory = new File(inputDataDirectoryPath);
+        if(inputDataDirectory.exists() && FileUtils.listFiles(inputDataDirectory, null, null).size() > 0){
+            return true;
+        }
+
+        return false;
+    }
+
+    private void handleOutPath(JobExecutionContext jobExecutionContext){}
+
+    public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
+
+    }
+}
\ No newline at end of file
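
HDFSDataMovementHandler above stages the job's local input directory into HDFS via FileSystem.copyFromLocalFile(). Stripped of the GFac plumbing, the core Hadoop calls reduce to the following minimal sketch (the configuration file location and both paths are placeholders):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HdfsStagingSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Placeholder resource; the handler instead loads whatever sits in HADOOP_CONFIG_DIR
            // or the Whirr-generated hadoop-site.xml.
            conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));

            FileSystem hdfs = FileSystem.get(conf);
            // Copy the local input directory into the configured HDFS input directory.
            hdfs.copyFromLocalFile(new Path("/tmp/job-input"), new Path("/user/airavata/input"));
            hdfs.close();
        }
    }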

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/handler/HadoopDeploymentHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/handler/HadoopDeploymentHandler.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/handler/HadoopDeploymentHandler.java
new file mode 100644
index 0000000..f72afd1
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/handler/HadoopDeploymentHandler.java
@@ -0,0 +1,274 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+
+package org.apache.airavata.gfac.handler;
+
+import com.google.common.io.Files;
+import org.apache.airavata.commons.gfac.type.HostDescription;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.schemas.gfac.HadoopHostType;
+import org.apache.commons.configuration.CompositeConfiguration;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.io.FileUtils;
+import org.apache.whirr.Cluster;
+import org.apache.whirr.ClusterController;
+import org.apache.whirr.ClusterControllerFactory;
+import org.apache.whirr.ClusterSpec;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.*;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.apache.whirr.ClusterSpec.Property.*;
+import static org.apache.whirr.ClusterSpec.Property.INSTANCE_TEMPLATES;
+import static org.apache.whirr.ClusterSpec.Property.PRIVATE_KEY_FILE;
+
+/**
+ * This handler takes care of deploying Hadoop in the cloud (in cloud-bursting scenarios) and
+ * deploying Hadoop in a local cluster. In the case of an existing Hadoop cluster, it ignores
+ * cluster setup and just uses the Hadoop configuration provided by the user.
+ */
+public class HadoopDeploymentHandler implements GFacHandler {
+    private static final Logger logger = LoggerFactory.getLogger("hadoop-dep-handler");
+
+    /**
+     * Once invoked, this method deploys Hadoop in a local cluster or in the cloud, based on the
+     * configuration provided. If there is an already deployed Hadoop cluster, this will skip
+     * deployment.
+     *
+     * @param jobExecutionContext job execution context containing all the required configurations
+     *                            and runtime information.
+     * @throws GFacHandlerException
+     */
+    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+        if(jobExecutionContext.isInPath()){
+            handleInPath(jobExecutionContext);
+        } else {
+            handleOutPath(jobExecutionContext);
+        }
+    }
+
+    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+        HostDescription hostDescription =
+                jobExecutionContext.getApplicationContext().getHostDescription();
+        if (!isHadoopDeploymentAvailable(hostDescription)) {
+            // Temp directory to keep generated configuration files.
+            File tempDirectory = Files.createTempDir();
+            try {
+                File hadoopSiteXML = launchHadoopCluster(hostDescription, tempDirectory);
+                jobExecutionContext.getInMessageContext().addParameter("HADOOP_SITE_XML", hadoopSiteXML.getAbsolutePath());
+                jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE", "WHIRR");
+                // TODO: Add hadoop-site.xml to job execution context.
+            } catch (IOException e) {
+                throw new GFacHandlerException("IO Error while processing configurations.",e);
+            } catch (ConfigurationException e) {
+                throw  new GFacHandlerException("Whirr configuration error.", e);
+            } catch (InterruptedException e) {
+                throw new GFacHandlerException("Hadoop cluster launch interrupted.", e);
+            } catch (TransformerException e) {
+                throw new GFacHandlerException("Error while creating hadoop-site.xml", e);
+            } catch (ParserConfigurationException e) {
+                throw new GFacHandlerException("Error while creating hadoop-site.xml", e);
+            }
+        } else {
+            jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE",
+                    "MANUAL");
+            jobExecutionContext.getInMessageContext().addParameter("HADOOP_CONFIG_DIR",
+                    ((HadoopHostType)hostDescription.getType()).getHadoopConfigurationDirectory());
+            logger.info("Hadoop configuration is available. Skipping hadoop deployment.");
+            if(logger.isDebugEnabled()){
+                logger.debug("Hadoop configuration directory: " +
+                        getHadoopConfigDirectory(hostDescription));
+            }
+        }
+    }
+
+    private void handleOutPath(JobExecutionContext jobExecutionContext){
+        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
+        if(((String)inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE")).equals("WHIRR")){
+            // TODO: Shutdown hadoop cluster.
+            logger.info("Shutdown hadoop cluster.");
+        }
+    }
+
+    private File launchHadoopCluster(HostDescription hostDescription, File workingDirectory)
+            throws IOException, GFacHandlerException, ConfigurationException, InterruptedException, TransformerException, ParserConfigurationException {
+        ClusterSpec hadoopClusterSpec =
+                whirrConfigurationToClusterSpec(hostDescription, workingDirectory);
+        ClusterController hadoopClusterController =
+                createClusterController(hadoopClusterSpec.getServiceName());
+        Cluster hadoopCluster =  hadoopClusterController.launchCluster(hadoopClusterSpec);
+
+        logger.info(String.format("Started cluster of %d instances.",
+                hadoopCluster.getInstances().size()));
+
+        File siteXML = new File(workingDirectory, "hadoop-site.xml");
+        clusterPropertiesToHadoopSiteXml(hadoopCluster.getConfiguration(), siteXML);
+
+        return siteXML;
+    }
+
+    private ClusterController createClusterController(String serviceName){
+        ClusterControllerFactory factory = new ClusterControllerFactory();
+        ClusterController controller = factory.create(serviceName);
+
+        if(controller == null){
+            logger.warn("Unable to find the service {}, using default.", serviceName);
+            controller = factory.create(null);
+        }
+
+        return controller;
+    }
+
+    private ClusterSpec whirrConfigurationToClusterSpec(HostDescription hostDescription,
+                                                        File workingDirectory) throws IOException, GFacHandlerException, ConfigurationException {
+        File whirrConfig = getWhirrConfigurationFile(hostDescription, workingDirectory);
+        CompositeConfiguration compositeConfiguration = new CompositeConfiguration();
+        Configuration configuration = new PropertiesConfiguration(whirrConfig);
+        compositeConfiguration.addConfiguration(configuration);
+
+        ClusterSpec hadoopClusterSpec = new ClusterSpec(compositeConfiguration);
+
+        for (ClusterSpec.Property required : EnumSet.of(CLUSTER_NAME, PROVIDER, IDENTITY, CREDENTIAL,
+                INSTANCE_TEMPLATES, PRIVATE_KEY_FILE)) {
+            if (hadoopClusterSpec.getConfiguration().getString(required.getConfigName()) == null) {
+                throw new IllegalArgumentException(String.format("Option '%s' not set.",
+                        required.getSimpleName()));
+            }
+        }
+
+        return hadoopClusterSpec;
+    }
+
+    private File getWhirrConfigurationFile(HostDescription hostDescription, File workingDirectory)
+            throws GFacHandlerException, IOException {
+        HadoopHostType hadoopHostDesc = (HadoopHostType)hostDescription;
+        if(hadoopHostDesc.isSetWhirrConfiguration()){
+            HadoopHostType.WhirrConfiguration whirrConfig = hadoopHostDesc.getWhirrConfiguration();
+            if(whirrConfig.isSetConfigurationFile()){
+                File whirrConfigFile = new File(whirrConfig.getConfigurationFile());
+                if(!whirrConfigFile.exists()){
+                    throw new GFacHandlerException(
+                            "Specified whirr configuration file doesn't exist.");
+                }
+
+                FileUtils.copyFileToDirectory(whirrConfigFile, workingDirectory);
+
+                return new File(workingDirectory, whirrConfigFile.getName());
+            } else if(whirrConfig.isSetConfiguration()){
+                Properties whirrConfigProps =
+                        whirrConfigurationsToProperties(whirrConfig.getConfiguration());
+                File whirrConfigFile = new File(workingDirectory, "whirr-hadoop.config");
+                whirrConfigProps.store(
+                        new FileOutputStream(whirrConfigFile), null);
+
+                return whirrConfigFile;
+            }
+        }
+
+        throw new GFacHandlerException("Cannot find Whirr configurations. A Whirr configuration "
+                + "is required if you don't have an already running Hadoop deployment.");
+    }
+
+    private Properties whirrConfigurationsToProperties(
+            HadoopHostType.WhirrConfiguration.Configuration configuration){
+        Properties whirrConfigProps = new Properties();
+
+        for(HadoopHostType.WhirrConfiguration.Configuration.Property property:
+                configuration.getPropertyArray()) {
+            whirrConfigProps.put(property.getName(), property.getValue());
+        }
+
+        return whirrConfigProps;
+    }
+
+    private void clusterPropertiesToHadoopSiteXml(Properties props, File hadoopSiteXml) throws ParserConfigurationException, TransformerException {
+        DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
+        DocumentBuilder documentBuilder = domFactory.newDocumentBuilder();
+
+        Document hadoopSiteXmlDoc = documentBuilder.newDocument();
+
+        hadoopSiteXmlDoc.setXmlVersion("1.0");
+        hadoopSiteXmlDoc.setXmlStandalone(true);
+        hadoopSiteXmlDoc.createProcessingInstruction("xml-stylesheet", "type=\"text/xsl\" href=\"configuration.xsl\"");
+
+        Element configEle = hadoopSiteXmlDoc.createElement("configuration");
+
+        hadoopSiteXmlDoc.appendChild(configEle);
+
+        for(Map.Entry<Object, Object> entry : props.entrySet()){
+            addPropertyToConfiguration(entry, configEle, hadoopSiteXmlDoc);
+        }
+
+        saveDomToFile(hadoopSiteXmlDoc, hadoopSiteXml);
+    }
+
+    private void saveDomToFile(Document dom, File destFile) throws TransformerException {
+        Source source = new DOMSource(dom);
+
+        Result result = new StreamResult(destFile);
+
+        Transformer transformer = TransformerFactory.newInstance().newTransformer();
+        transformer.transform(source, result);
+    }
+
+    private void addPropertyToConfiguration(Map.Entry<Object, Object> entry, Element configElement, Document doc){
+        Element property = doc.createElement("property");
+        configElement.appendChild(property);
+
+        Element nameEle = doc.createElement("name");
+        nameEle.setTextContent(entry.getKey().toString());
+        property.appendChild(nameEle);
+
+        Element valueEle = doc.createElement("value");
+        valueEle.setTextContent(entry.getValue().toString());
+        property.appendChild(valueEle);
+    }
+
+    private boolean isHadoopDeploymentAvailable(HostDescription hostDescription) {
+        return ((HadoopHostType) hostDescription.getType()).isSetHadoopConfigurationDirectory();
+    }
+
+    private String getHadoopConfigDirectory(HostDescription hostDescription){
+        return ((HadoopHostType)hostDescription.getType()).getHadoopConfigurationDirectory();
+    }
+
+    public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
+
+    }
+}
\ No newline at end of file
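
whirrConfigurationToClusterSpec() above refuses to launch a cluster unless six options are present. Assuming the standard Whirr 0.7 property names (the provider, credentials, and key path below are placeholders, not values from this commit), a minimal whirr-hadoop.config passing that check would look something like:

    whirr.cluster-name=airavata-hadoop
    whirr.provider=aws-ec2
    whirr.identity=${env:AWS_ACCESS_KEY_ID}
    whirr.credential=${env:AWS_SECRET_ACCESS_KEY}
    whirr.instance-templates=1 hadoop-namenode+hadoop-jobtracker,2 hadoop-datanode+hadoop-tasktracker
    whirr.private-key-file=${sys:user.home}/.ssh/id_rsa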

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/provider/impl/HadoopProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/provider/impl/HadoopProvider.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/provider/impl/HadoopProvider.java
new file mode 100644
index 0000000..c20e2ea
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/provider/impl/HadoopProvider.java
@@ -0,0 +1,153 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+
+package org.apache.airavata.gfac.provider.impl;
+
+import java.io.File;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.gfac.provider.utils.HadoopUtils;
+import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.OutputParameterType;
+import org.apache.airavata.schemas.gfac.StringParameterType;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+
+/**
+ * Executes a Hadoop job using the cluster configuration provided by handlers
+ * in the in-flow.
+ */
+public class HadoopProvider extends AbstractProvider{
+    private static final Logger logger = LoggerFactory.getLogger(HadoopProvider.class);
+
+    private boolean isWhirrBasedDeployment = false;
+    private File hadoopConfigDir;
+
+    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
+        if(inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE").equals("WHIRR")){
+            isWhirrBasedDeployment = true;
+        } else {
+            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
+            File hadoopConfigDir = new File(hadoopConfigDirPath);
+            if (!hadoopConfigDir.exists()){
+                throw new GFacProviderException("Specified hadoop configuration directory doesn't exist.");
+            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).size() <= 0){
+                throw new GFacProviderException("Cannot find any hadoop configuration files inside specified directory.");
+            }
+
+            this.hadoopConfigDir = hadoopConfigDir;
+        }
+    }
+
+    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
+                (HadoopApplicationDeploymentDescriptionType)jobExecutionContext
+                        .getApplicationContext().getApplicationDeploymentDescription().getType();
+        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
+        HadoopApplicationDeploymentDescriptionType.HadoopJobConfiguration jobConf = hadoopAppDesc.getHadoopJobConfiguration();
+
+        try{
+            // Preparing Hadoop configuration
+            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(
+                    jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
+
+            // Load jar containing map-reduce job implementation
+            ArrayList<URL> mapRedJars = new ArrayList<URL>();
+            mapRedJars.add(new File(jobConf.getJarLocation()).toURI().toURL());
+            URLClassLoader childClassLoader = new URLClassLoader(mapRedJars.toArray(new URL[mapRedJars.size()]),
+                    this.getClass().getClassLoader());
+
+            Job job = new Job(hadoopConf);
+
+            job.setJobName(jobConf.getJobName());
+
+            job.setOutputKeyClass(Class.forName(jobConf.getOutputKeyClass(), true, childClassLoader));
+            job.setOutputValueClass(Class.forName(jobConf.getOutputValueClass(), true, childClassLoader));
+
+            job.setMapperClass((Class<? extends Mapper>)Class.forName(jobConf.getMapperClass(), true, childClassLoader));
+            job.setCombinerClass((Class<? extends Reducer>) Class.forName(jobConf.getCombinerClass(), true, childClassLoader));
+            job.setReducerClass((Class<? extends Reducer>) Class.forName(jobConf.getReducerClass(), true, childClassLoader));
+
+            job.setInputFormatClass((Class<? extends InputFormat>)Class.forName(jobConf.getInputFormatClass(), true, childClassLoader));
+            job.setOutputFormatClass((Class<? extends OutputFormat>) Class.forName(jobConf.getOutputFormatClass(), true, childClassLoader));
+
+            FileInputFormat.setInputPaths(job, new Path(hadoopAppDesc.getInputDataDirectory()));
+            FileOutputFormat.setOutputPath(job, new Path(hadoopAppDesc.getOutputDataDirectory()));
+
+            job.waitForCompletion(true);
+            System.out.println(job.getTrackingURL());
+            if(jobExecutionContext.getOutMessageContext() == null){
+                jobExecutionContext.setOutMessageContext(new MessageContext());
+            }
+
+            OutputParameterType[] outputParametersArray = jobExecutionContext.getApplicationContext().
+                    getServiceDescription().getType().getOutputParametersArray();
+            for(OutputParameterType outparamType : outputParametersArray){
+                String paramName = outparamType.getParameterName();
+                if(paramName.equals("test-hadoop")){
+                    ActualParameter outParam = new ActualParameter();
+                    outParam.getType().changeType(StringParameterType.type);
+                    ((StringParameterType) outParam.getType()).setValue(job.getTrackingURL());
+                    jobExecutionContext.getOutMessageContext().addParameter("test-hadoop", outParam);
+                }
+            }
+        } catch (Exception e) {
+            String errMessage = "Error occurred during Map-Reduce job execution.";
+            logger.error(errMessage, e);
+            throw new GFacProviderException(errMessage, e);
+        }
+    }
+
+    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+        // TODO: How to handle cluster shutdown. Best way is to introduce inPath/outPath to handler.
+    }
+
+    @Override
+    public void cancelJob(String jobId, JobExecutionContext jobExecutionContext) throws GFacException {
+        throw new UnsupportedOperationException("Job cancellation is not implemented for the Hadoop provider.");
+    }
+
+
+    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
+
+    }
+}
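
HadoopProvider above resolves every user-supplied job class through a child URLClassLoader built from the job jar, so mapper and reducer implementations never need to be on GFac's own classpath. The pattern in isolation, as a sketch (the jar path and class name are placeholders):

    import java.io.File;
    import java.net.URL;
    import java.net.URLClassLoader;

    public class JobJarLoadingSketch {
        public static void main(String[] args) throws Exception {
            File jobJar = new File("/tmp/mapreduce-job.jar");     // placeholder jar location
            URLClassLoader childLoader = new URLClassLoader(
                    new URL[]{jobJar.toURI().toURL()},
                    JobJarLoadingSketch.class.getClassLoader());
            // Parent-first delegation: GFac's own classpath is searched first, and classes
            // found only in the job jar (the user's mapper/reducer) are loaded from the jar.
            Class<?> mapperClass = Class.forName("org.example.WordCountMapper", true, childLoader);
            System.out.println("Loaded " + mapperClass.getName());
        }
    }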

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/provider/utils/HadoopUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/provider/utils/HadoopUtils.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/provider/utils/HadoopUtils.java
new file mode 100644
index 0000000..c3053d1
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/provider/utils/HadoopUtils.java
@@ -0,0 +1,60 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+
+package org.apache.airavata.gfac.provider.utils;
+
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.util.Collection;
+
+public class HadoopUtils {
+    public static Configuration createHadoopConfiguration(
+            JobExecutionContext jobExecutionContext,
+            boolean isWhirrBasedDeployment,
+            File hadoopConfigDir) throws FileNotFoundException {
+        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
+        Configuration hadoopConf = new Configuration();
+
+        if(isWhirrBasedDeployment){
+            hadoopConf.addResource(new FileInputStream(
+                    new File((String)inMessageContext.getParameter("HADOOP_SITE_XML"))));
+        } else {
+            readHadoopClusterConfigurationFromDirectory(hadoopConfigDir, hadoopConf);
+        }
+
+        return hadoopConf;
+    }
+
+    private static void readHadoopClusterConfigurationFromDirectory(File localHadoopConfigurationDirectory, Configuration hadoopConf)
+            throws FileNotFoundException {
+        Collection hadoopConfigurationFiles =
+                FileUtils.listFiles(localHadoopConfigurationDirectory, null, false);
+        for (Object f : hadoopConfigurationFiles) {
+            hadoopConf.addResource(new FileInputStream((File)f));
+        }
+    }
+}
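
createHadoopConfiguration() above has two branches: add one resource (the Whirr-generated hadoop-site.xml) or add every file in a manually managed configuration directory. The directory branch in isolation, as a sketch (the directory path is a placeholder):

    import java.io.File;
    import java.io.FileInputStream;
    import org.apache.commons.io.FileUtils;
    import org.apache.hadoop.conf.Configuration;

    public class HadoopConfDirSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Non-recursive listing, mirroring readHadoopClusterConfigurationFromDirectory().
            for (Object f : FileUtils.listFiles(new File("/etc/hadoop/conf"), null, false)) {
                conf.addResource(new FileInputStream((File) f));
            }
            System.out.println("fs.default.name = " + conf.get("fs.default.name"));
        }
    }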

http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/resources/errors.properties b/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
new file mode 100644
index 0000000..88c41b8
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
@@ -0,0 +1,197 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Directly copied from jglobus. Not a good way to manage error properties.
+1 = Parameter not supported
+2 = The RSL length is greater than the maximum allowed
+3 = No resources available
+4 = Bad directory specified
+5 = The executable does not exist
+6 = Insufficient funds
+7 = Authentication with the remote server failed
+8 = Job cancelled by user
+9 = Job cancelled by system
+
+10 = Data transfer to the server failed
+11 = The stdin file does not exist
+12 = The connection to the server failed (check host and port)
+13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
+14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
+15 = The job manager received an invalid RSL
+16 = Could not connect to job manager
+17 = The job failed when the job manager attempted to run it
+18 = Paradyn error
+19 = The provided RSL 'jobtype' value is invalid
+
+20 = The provided RSL 'myjob' value is invalid
+21 = The job manager failed to locate an internal script argument file
+22 = The job manager failed to create an internal script argument file
+23 = The job manager detected an invalid job state
+24 = The job manager detected an invalid script response
+25 = The job manager detected an invalid job state
+26 = The provided RSL 'jobtype' value is not supported by this job manager
+27 = Unimplemented
+28 = The job manager failed to create an internal script submission file
+29 = The job manager cannot find the user proxy
+
+30 = The job manager failed to open the user proxy
+31 = The job manager failed to cancel the job as requested
+32 = System memory allocation failed
+33 = The interprocess job communication initialization failed
+34 = The interprocess job communication setup failed
+35 = The provided RSL 'host count' value is invalid
+36 = One of the provided RSL parameters is unsupported
+37 = The provided RSL 'queue' parameter is invalid
+38 = The provided RSL 'project' parameter is invalid
+39 = The provided RSL string includes variables that could not be identified
+
+40 = The provided RSL 'environment' parameter is invalid
+41 = The provided RSL 'dryrun' parameter is invalid
+42 = The provided RSL is invalid (an empty string)
+43 = The job manager failed to stage the executable
+44 = The job manager failed to stage the stdin file
+45 = The requested job manager type is invalid
+46 = The provided RSL 'arguments' parameter is invalid
+47 = The gatekeeper failed to run the job manager
+48 = The provided RSL could not be properly parsed
+49 = There is a version mismatch between GRAM components
+
+50 = The provided RSL 'arguments' parameter is invalid
+51 = The provided RSL 'count' parameter is invalid
+52 = The provided RSL 'directory' parameter is invalid
+53 = The provided RSL 'dryrun' parameter is invalid
+54 = The provided RSL 'environment' parameter is invalid
+55 = The provided RSL 'executable' parameter is invalid
+56 = The provided RSL 'host_count' parameter is invalid
+57 = The provided RSL 'jobtype' parameter is invalid
+58 = The provided RSL 'maxtime' parameter is invalid
+59 = The provided RSL 'myjob' parameter is invalid
+
+60 = The provided RSL 'paradyn' parameter is invalid
+61 = The provided RSL 'project' parameter is invalid
+62 = The provided RSL 'queue' parameter is invalid
+63 = The provided RSL 'stderr' parameter is invalid
+64 = The provided RSL 'stdin' parameter is invalid
+65 = The provided RSL 'stdout' parameter is invalid
+66 = The job manager failed to locate an internal script
+67 = The job manager failed on the system call pipe()
+68 = The job manager failed on the system call fcntl()
+69 = The job manager failed to create the temporary stdout filename
+
+70 = The job manager failed to create the temporary stderr filename
+71 = The job manager failed on the system call fork()
+72 = The executable file permissions do not allow execution
+73 = The job manager failed to open stdout
+74 = The job manager failed to open stderr
+75 = The cache file could not be opened in order to relocate the user proxy
+76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
+77 = The job manager failed to insert the contact in the client contact list
+78 = The contact was not found in the job manager's client contact list
+79 = Connecting to the job manager failed.  Possible reasons: job terminated, invalid job contact, network problems, ...
+
+80 = The syntax of the job contact is invalid
+81 = The executable parameter in the RSL is undefined
+82 = The job manager service is misconfigured.  condor arch undefined
+83 = The job manager service is misconfigured.  condor os undefined
+84 = The provided RSL 'min_memory' parameter is invalid
+85 = The provided RSL 'max_memory' parameter is invalid
+86 = The RSL 'min_memory' value is not zero or greater
+87 = The RSL 'max_memory' value is not zero or greater
+88 = The creation of a HTTP message failed
+89 = Parsing incoming HTTP message failed
+
+90 = The packing of information into a HTTP message failed
+91 = An incoming HTTP message did not contain the expected information
+92 = The job manager does not support the service that the client requested
+93 = The gatekeeper failed to find the requested service
+94 = The jobmanager does not accept any new requests (shutting down)
+95 = The client failed to close the listener associated with the callback URL
+96 = The gatekeeper contact cannot be parsed
+97 = The job manager could not find the 'poe' command
+98 = The job manager could not find the 'mpirun' command
+99 = The provided RSL 'start_time' parameter is invalid
+100 = The provided RSL 'reservation_handle' parameter is invalid
+
+101 = The provided RSL 'max_wall_time' parameter is invalid
+102 = The RSL 'max_wall_time' value is not zero or greater
+103 = The provided RSL 'max_cpu_time' parameter is invalid
+104 = The RSL 'max_cpu_time' value is not zero or greater
+105 = The job manager is misconfigured, a scheduler script is missing
+106 = The job manager is misconfigured, a scheduler script has invalid permissions
+107 = The job manager failed to signal the job
+108 = The job manager did not recognize/support the signal type
+109 = The job manager failed to get the job id from the local scheduler
+
+110 = The job manager is waiting for a commit signal
+111 = The job manager timed out while waiting for a commit signal
+112 = The provided RSL 'save_state' parameter is invalid
+113 = The provided RSL 'restart' parameter is invalid
+114 = The provided RSL 'two_phase' parameter is invalid
+115 = The RSL 'two_phase' value is not zero or greater
+116 = The provided RSL 'stdout_position' parameter is invalid
+117 = The RSL 'stdout_position' value is not zero or greater
+118 = The provided RSL 'stderr_position' parameter is invalid
+119 = The RSL 'stderr_position' value is not zero or greater
+
+120 = The job manager restart attempt failed
+121 = The job state file doesn't exist
+122 = Could not read the job state file
+123 = Could not write the job state file
+124 = The old job manager is still alive
+125 = The job manager state file TTL expired
+126 = It is unknown if the job was submitted
+127 = The provided RSL 'remote_io_url' parameter is invalid
+128 = Could not write the remote io url file
+129 = The standard output/error size is different
+
+130 = The job manager was sent a stop signal (job is still running)
+131 = The user proxy expired (job is still running)
+132 = The job was not submitted by original jobmanager
+133 = The job manager is not waiting for that commit signal
+134 = The provided RSL scheduler specific parameter is invalid
+135 = The job manager could not stage in a file
+136 = The scratch directory could not be created
+137 = The provided 'gass_cache' parameter is invalid
+138 = The RSL contains attributes which are not valid for job submission
+139 = The RSL contains attributes which are not valid for stdio update
+
+140 = The RSL contains attributes which are not valid for job restart
+141 = The provided RSL 'file_stage_in' parameter is invalid
+142 = The provided RSL 'file_stage_in_shared' parameter is invalid
+143 = The provided RSL 'file_stage_out' parameter is invalid
+144 = The provided RSL 'gass_cache' parameter is invalid
+145 = The provided RSL 'file_cleanup' parameter is invalid
+146 = The provided RSL 'scratch_dir' parameter is invalid
+147 = The provided scheduler-specific RSL parameter is invalid
+148 = A required RSL attribute was not defined in the RSL spec
+149 = The gass_cache attribute points to an invalid cache directory
+
+150 = The provided RSL 'save_state' parameter has an invalid value
+151 = The job manager could not open the RSL attribute validation file
+152 = The job manager could not read the RSL attribute validation file
+153 = The provided RSL 'proxy_timeout' is invalid
+154 = The RSL 'proxy_timeout' value is not greater than zero
+155 = The job manager could not stage out a file
+156 = The job contact string does not match any which the job manager is handling
+157 = Proxy delegation failed
+158 = The job manager could not lock the state lock file
+
+1000 = Failed to start up callback handler
+1003 = Job contact not set

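The file is a flat code-to-message map, keyed by GRAM error number, so resolving a code is a plain java.util.Properties lookup. A hedged sketch of how a caller might surface these messages (class name and resource path are illustrative only, not part of the commit):

    import java.io.InputStream;
    import java.util.Properties;

    public class GramErrorLookup {
        public static void main(String[] args) throws Exception {
            Properties errors = new Properties();
            InputStream in = GramErrorLookup.class.getResourceAsStream("/errors.properties");
            try {
                errors.load(in);
            } finally {
                in.close();
            }
            // "12" -> "The connection to the server failed (check host and port)"
            System.out.println(errors.getProperty("12", "Unknown GRAM error code"));
        }
    }
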
http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/resources/service.properties b/modules/gfac/gfac-hadoop/src/main/resources/service.properties
new file mode 100644
index 0000000..391bfea
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/main/resources/service.properties
@@ -0,0 +1,58 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+
+#
+# Class implementing the Scheduler interface; it is used to determine a Provider.
+#
+scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
+
+#
+# Data service plugin classes
+#
+datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
+
+#
+# Pre-execution plugin classes, for example GridFTP input staging
+#
+prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging 
+prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
+
+#
+# Post-execution plugin classes, for example GridFTP output staging
+#
+postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
+postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
+
+#
+# SSH private key location. It will be used by SSHProvider
+#
+# ssh.key=/home/user/.ssh/id_rsa
+# ssh.keypass=
+# ssh.username=usernameAtHost
+
+#
+# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
+#
+# myproxy.server=myproxy.teragrid.org
+# myproxy.user=username
+# myproxy.pass=password
+# myproxy.life=3600
\ No newline at end of file

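Worth noting when reading the prechain/postchain entries above: a plain java.util.Properties load keeps only the last occurrence of a duplicate key, so the two prechain.classes lines would collapse to HttpInputStaging unless the framework parses the file line by line (how GFac actually loads this file is not shown in this commit). A minimal sketch of consuming the uncommented keys, with the reflection call purely illustrative:

    import java.io.InputStream;
    import java.util.Properties;

    public class ServicePropertiesSketch {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            InputStream in = ServicePropertiesSketch.class.getResourceAsStream("/service.properties");
            try {
                props.load(in);
            } finally {
                in.close();
            }
            // scheduler.class names the Scheduler implementation used to pick a Provider.
            String schedulerClass = props.getProperty("scheduler.class").trim();
            Object scheduler = Class.forName(schedulerClass).newInstance();
            System.out.println("Scheduler: " + scheduler.getClass().getName());
        }
    }
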

[4/7] git commit: separate gfac-local from gfac-core

Posted by la...@apache.org.
separate gfac-local from gfac-core


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/761b81e9
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/761b81e9
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/761b81e9

Branch: refs/heads/temp
Commit: 761b81e9bc4a157be2b72cd494dcb7eab16ef4ae
Parents: 216e2d6
Author: Nipun Udara <y....@gmail.com>
Authored: Sat Apr 26 19:58:52 2014 +0530
Committer: Nipun Udara <y....@gmail.com>
Committed: Sat Apr 26 19:58:52 2014 +0530

----------------------------------------------------------------------
 modules/gfac/gfac-local/pom.xml | 23 -----------------------
 1 file changed, 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/761b81e9/modules/gfac/gfac-local/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/pom.xml b/modules/gfac/gfac-local/pom.xml
index 9310f22..929c3d7 100644
--- a/modules/gfac/gfac-local/pom.xml
+++ b/modules/gfac/gfac-local/pom.xml
@@ -63,27 +63,4 @@
 
     </dependencies>
 
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-dependency-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>copy-dependencies</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>copy-dependencies</goal>
-                        </goals>
-                        <configuration>
-                            <outputDirectory>target/lib</outputDirectory>
-                            <overWriteReleases>false</overWriteReleases>
-                            <overWriteSnapshots>true</overWriteSnapshots>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-
 </project>


[6/7] separate gfac-hadoop from gfac-core

Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt b/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
new file mode 100644
index 0000000..e749e9c
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
@@ -0,0 +1,73 @@
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
+	distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
+	the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
+	obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
+	in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+	ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
+	the License. -->
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
+<xsl:output method="text" />
+<xsl:template match="/ns:JobDescriptor">
+#! /bin/sh
+# PBS batch job script built by Globus job manager
+#   <xsl:choose>
+    <xsl:when test="ns:shellName">
+##PBS -S <xsl:value-of select="ns:shellName"/>
+    </xsl:when></xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:queueName">
+#PBS -q <xsl:value-of select="ns:queueName"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:mailOptions">
+#PBS -m <xsl:value-of select="ns:mailOptions"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+<xsl:when test="ns:acountString">
+#PBS -A <xsl:value-of select="ns:acountString"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:maxWallTime">
+#PBS -l walltime=<xsl:value-of select="ns:maxWallTime"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:standardOutFile">
+#PBS -o <xsl:value-of select="ns:standardOutFile"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="ns:standardErrorFile">
+#PBS -e <xsl:value-of select="ns:standardErrorFile"/>
+    </xsl:when>
+    </xsl:choose>
+    <xsl:choose>
+    <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
+#PBS -l nodes=<xsl:value-of select="ns:nodes"/>:ppn=<xsl:value-of select="ns:processesPerNode"/>
+<xsl:text>&#xa;</xsl:text>
+    </xsl:when>
+    </xsl:choose>
+<xsl:for-each select="ns:exports/ns:name">
+<xsl:value-of select="."/>=<xsl:value-of select="./@value"/><xsl:text>&#xa;</xsl:text>
+export<xsl:text>   </xsl:text><xsl:value-of select="."/>
+<xsl:text>&#xa;</xsl:text>
+</xsl:for-each>
+<xsl:for-each select="ns:preJobCommands/ns:command">
+      <xsl:value-of select="."/><xsl:text>   </xsl:text>
+    </xsl:for-each>
+cd <xsl:text>   </xsl:text><xsl:value-of select="ns:workingDirectory"/><xsl:text>&#xa;</xsl:text>
+    <xsl:choose><xsl:when test="ns:jobSubmitterCommand">
+<xsl:value-of select="ns:jobSubmitterCommand"/><xsl:text>   </xsl:text></xsl:when></xsl:choose><xsl:value-of select="ns:executablePath"/><xsl:text>   </xsl:text>
+<xsl:for-each select="ns:inputs/ns:input">
+      <xsl:value-of select="."/><xsl:text>   </xsl:text>
+    </xsl:for-each>
+<xsl:for-each select="ns:postJobCommands/ns:command">
+      <xsl:value-of select="."/><xsl:text>   </xsl:text>
+</xsl:for-each>
+
+</xsl:template>
+
+</xsl:stylesheet>
\ No newline at end of file

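The stylesheet turns a JobDescriptor document (namespace http://airavata.apache.org/gsi/ssh/2012/12) into a PBS batch script: each xsl:when emits one #PBS directive when the matching element is present, followed by exports, pre-job commands, a cd into the working directory, the submitter command plus executable and inputs, and post-job commands. Applying it needs nothing beyond the JDK; a sketch, where jobdescriptor.xml is a hypothetical input document:

    import java.io.File;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.stream.StreamResult;
    import javax.xml.transform.stream.StreamSource;

    public class PbsScriptSketch {
        public static void main(String[] args) throws Exception {
            Transformer t = TransformerFactory.newInstance()
                    .newTransformer(new StreamSource(new File("PBSTemplate.xslt")));
            // jobdescriptor.xml: hypothetical ns:JobDescriptor instance with
            // elements such as queueName, maxWallTime and executablePath.
            t.transform(new StreamSource(new File("jobdescriptor.xml")),
                    new StreamResult(new File("job.pbs")));
        }
    }
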
http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/test/resources/gfac-config.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/test/resources/gfac-config.xml b/modules/gfac/gfac-hadoop/src/test/resources/gfac-config.xml
new file mode 100644
index 0000000..7c7e704
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/test/resources/gfac-config.xml
@@ -0,0 +1,90 @@
+<!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
+    contributor license agreements. See the NOTICE file ~ distributed with this
+    work for additional information ~ regarding copyright ownership. The ASF
+    licenses this file ~ to you under the Apache License, Version 2.0 (the ~
+    "License"); you may not use this file except in compliance ~ with the License.
+    You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
+    ~ ~ Unless required by applicable law or agreed to in writing, ~ software
+    distributed under the License is distributed on an ~ "AS IS" BASIS, WITHOUT
+    WARRANTIES OR CONDITIONS OF ANY ~ KIND, either express or implied. See the
+    License for the ~ specific language governing permissions and limitations
+    ~ under the License. -->
+    
+<GFac>
+    <GlobalHandlers>
+        <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.AppDescriptorCheckHandler">
+                    <property name="name" value="value"/>
+            </Handler>
+        </InHandlers>
+        <OutHandlers></OutHandlers>
+    </GlobalHandlers>
+    <Provider class="org.apache.airavata.gfac.provider.impl.LocalProvider" host="org.apache.airavata.schemas.gfac.impl.HostDescriptionTypeImpl">
+        <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.LocalDirectorySetupHandler"/>
+        </InHandlers>
+    </Provider>
+    <Provider class="org.apache.airavata.gfac.provider.impl.GramProvider" host="org.apache.airavata.schemas.gfac.impl.GlobusHostTypeImpl">
+        <property name="name" value="value"/>
+        <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GramDirectorySetupHandler">
+                    <property name="name" value="value"/>
+            </Handler>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPOutputHandler"/>
+        </OutHandlers>
+    </Provider>
+      <Provider class="org.apache.airavata.gfac.provider.impl.BESProvider" host="org.apache.airavata.schemas.gfac.impl.UnicoreHostTypeImpl">
+        <InHandlers>
+        	<Handler class="org.apache.airavata.gfac.handler.GramDirectorySetupHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPOutputHandler"/>
+        </OutHandlers>
+    </Provider>
+
+    <Provider class="org.apache.airavata.gfac.ec2.EC2Provider" host="org.apache.airavata.schemas.gfac.impl.Ec2HostTypeImpl">
+        <InHandlers/>
+        <OutHandlers/>
+    </Provider>
+
+    <Provider class="org.apache.airavata.gfac.provider.impl.HadoopProvider" host="org.apache.airavata.schemas.gfac.impl.HadoopHostTypeImpl">
+        <InHandlers>
+        	<Handler class="org.apache.airavata.gfac.handler.HadoopDeploymentHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.HDFSDataMovementHandler"/>
+        </InHandlers>
+        <OutHandlers/>
+    </Provider>
+
+    <Application name="UltraScan">
+        <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GramDirectorySetupHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GridFTPOutputHandler"/>
+        </OutHandlers>
+    </Application>
+
+     <Provider class="org.apache.airavata.gfac.provider.impl.SSHProvider" host="org.apache.airavata.schemas.gfac.impl.SSHHostTypeImpl">
+         <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.SSHDirectorySetupHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.GSISSHInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GSISSHOutputHandler"/>
+        </OutHandlers>
+    </Provider>
+     <Provider class="org.apache.airavata.gfac.provider.impl.GSISSHProvider" host="org.apache.airavata.schemas.gfac.impl.GsisshHostTypeImpl">
+         <InHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GSISSHDirectorySetupHandler"/>
+            <Handler class="org.apache.airavata.gfac.handler.GSISSHInputHandler"/>
+        </InHandlers>
+        <OutHandlers>
+            <Handler class="org.apache.airavata.gfac.handler.GSISSHOutputHandler"/>
+        </OutHandlers>
+    </Provider>
+</GFac>
\ No newline at end of file

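The configuration maps each host description type to a Provider class plus its InHandler/OutHandler chains (GFac's own loader lives in gfac-core and is not part of this diff). A standalone DOM sketch that just prints the Provider-to-host mapping encoded above:

    import java.io.File;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Element;
    import org.w3c.dom.NodeList;

    public class GfacConfigSketch {
        public static void main(String[] args) throws Exception {
            Element root = DocumentBuilderFactory.newInstance()
                    .newDocumentBuilder()
                    .parse(new File("gfac-config.xml"))
                    .getDocumentElement();
            NodeList providers = root.getElementsByTagName("Provider");
            for (int i = 0; i < providers.getLength(); i++) {
                Element p = (Element) providers.item(i);
                System.out.println(p.getAttribute("host") + " -> " + p.getAttribute("class"));
            }
        }
    }
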
http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/test/resources/logging.properties b/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
new file mode 100644
index 0000000..0584d38
--- /dev/null
+++ b/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
@@ -0,0 +1,42 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#default/fallback log4j configuration
+#
+
+# Set root logger level to INFO and attach appenders A1 and A2.
+log4j.rootLogger=INFO, A1, A2
+
+# A1 is set to be a rolling file appender with default params
+log4j.appender.A1=org.apache.log4j.RollingFileAppender
+log4j.appender.A1.File=target/seclogs.txt
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
+
+# A2 is a console appender
+log4j.appender.A2=org.apache.log4j.ConsoleAppender
+
+# A2 uses PatternLayout.
+log4j.appender.A2.layout=org.apache.log4j.PatternLayout
+log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
+
+log4j.logger.unicore.security=INFO
+

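With the root level at INFO, A1 appends to target/seclogs.txt while A2 mirrors the same events to the console; DEBUG and below are dropped. A minimal log4j 1.2 usage sketch against this exact file (class name illustrative):

    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    public class LoggingSketch {
        private static final Logger log = Logger.getLogger(LoggingSketch.class);

        public static void main(String[] args) {
            // Point log4j at the file above instead of its default lookup.
            PropertyConfigurator.configure("src/test/resources/logging.properties");
            log.info("written to target/seclogs.txt (A1) and the console (A2)");
            log.debug("suppressed: below the INFO root level");
        }
    }
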
http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/gfac-ssh/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/pom.xml b/modules/gfac/gfac-ssh/pom.xml
index c463661..6d15355 100644
--- a/modules/gfac/gfac-ssh/pom.xml
+++ b/modules/gfac/gfac-ssh/pom.xml
@@ -103,6 +103,11 @@
             <artifactId>xmlbeans</artifactId>
             <version>${xmlbeans.version}</version>
         </dependency>
+        <dependency>
+            <groupId>net.schmizz</groupId>
+            <artifactId>sshj</artifactId>
+            <version>0.6.1</version>
+        </dependency>
 
     </dependencies>
 

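The new net.schmizz:sshj:0.6.1 dependency gives gfac-ssh a pure-Java SSH client. How the module wires it in is outside this diff; a hedged sketch of basic sshj usage (host name, user, and command are placeholders):

    import java.util.concurrent.TimeUnit;
    import net.schmizz.sshj.SSHClient;
    import net.schmizz.sshj.connection.channel.direct.Session;

    public class SshjSketch {
        public static void main(String[] args) throws Exception {
            SSHClient ssh = new SSHClient();
            ssh.loadKnownHosts();                       // verify against ~/.ssh/known_hosts
            ssh.connect("remote.example.org");          // placeholder host
            try {
                ssh.authPublickey("user");              // default key locations under ~/.ssh
                Session session = ssh.startSession();
                try {
                    Session.Command cmd = session.exec("echo hello");
                    cmd.join(5, TimeUnit.SECONDS);
                    System.out.println("exit status: " + cmd.getExitStatus());
                } finally {
                    session.close();
                }
            } finally {
                ssh.disconnect();
            }
        }
    }
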
http://git-wip-us.apache.org/repos/asf/airavata/blob/9a787309/modules/gfac/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/pom.xml b/modules/gfac/pom.xml
index d2ee2a5..774a445 100644
--- a/modules/gfac/pom.xml
+++ b/modules/gfac/pom.xml
@@ -34,7 +34,8 @@
                 <module>gfac-ec2</module>
                 <module>gfac-ssh</module>
                 <module>gfac-local</module>
-		<module>gfac-gram</module>
+                <module>gfac-hadoop</module>
+                <module>gfac-gram</module>
                 <module>gfac-gsissh</module>
 		<module>gfac-bes</module>
             </modules>