Posted to commits@airavata.apache.org by la...@apache.org on 2014/04/04 15:22:52 UTC

[2/4] Fixing the build to handle grid test cases

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/ParamChemTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/ParamChemTest.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/ParamChemTest.java
deleted file mode 100644
index 5e8d1fa..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/ParamChemTest.java
+++ /dev/null
@@ -1,306 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.ApplicationContext;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.airavata.gfac.handler.GFacHandlerConfig;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.DataType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.ParameterType;
-import org.apache.airavata.schemas.gfac.ProjectAccountType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.junit.Before;
-import org.junit.Test;
-
-public class ParamChemTest extends GFacBaseTestCase {
-    private JobExecutionContext jobExecutionContext;
-
-    @Before
-    public void setUp() throws Exception {
-
-        GFacConfiguration gFacConfiguration = new GFacConfiguration(null);
-        GSISecurityContext context = getSecurityContext();
-
-        //have to set InFlwo Handlers and outFlowHandlers
-        gFacConfiguration.setInHandlers(Arrays.asList(new GFacHandlerConfig[]{new GFacHandlerConfig(null,"org.apache.airavata.gfac.handler.GramDirectorySetupHandler"), new GFacHandlerConfig(null,"org.apache.airavata.gfac.handler.GridFTPInputHandler")}));
-        gFacConfiguration.setOutHandlers(Arrays.asList(new GFacHandlerConfig[] {new GFacHandlerConfig(null,"org.apache.airavata.gfac.handler.GridFTPOutputHandler")}));
-        /*
-        * Host
-        */
-        String serviceName = "Prepare_Model_Reference_Data";
-        HostDescription host = new HostDescription(GlobusHostType.type);
-        host.getType().setHostName("trestles");
-        host.getType().setHostAddress("trestles.sdsc.edu");
-        ((GlobusHostType) host.getType()).addGridFTPEndPoint("gsiftp://trestles-dm.sdsc.edu:2811");
-        ((GlobusHostType) host.getType()).addGlobusGateKeeperEndPoint("trestles-login2.sdsc.edu:2119/jobmanager-pbstest2");
-
-        /*
-        * App
-        */
-        ApplicationDescription appDesc =
-                new ApplicationDescription(HpcApplicationDeploymentType.type);
-        ApplicationDeploymentDescriptionType applicationDeploymentDescriptionType
-                = appDesc.getType();
-        applicationDeploymentDescriptionType.addNewApplicationName().setStringValue(serviceName);
-        String tempDir = "/oasis/projects/nsf/uic151/gridchem/airavata-workdirs";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = tempDir + File.separator
-                + serviceName + "_" + date + "_" + UUID.randomUUID();
-        applicationDeploymentDescriptionType.setExecutableLocation("/home/gridchem/workflow_script/sys_exec/scripts/step1/step1_model_refdata_prep.sh");
-        applicationDeploymentDescriptionType.setScratchWorkingDirectory(tempDir);
-        applicationDeploymentDescriptionType.setStaticWorkingDirectory(tempDir);
-        applicationDeploymentDescriptionType.setInputDataDirectory(tempDir + File.separator + "inputData");
-        applicationDeploymentDescriptionType.setOutputDataDirectory(tempDir + File.separator + "outputData");
-        applicationDeploymentDescriptionType.setStandardOutput(tempDir + File.separator + applicationDeploymentDescriptionType.getApplicationName().getStringValue() + ".stdout");
-        applicationDeploymentDescriptionType.setStandardError(tempDir + File.separator + applicationDeploymentDescriptionType.getApplicationName().getStringValue() + ".stderr");
-
-        ProjectAccountType projectAccountType = ((HpcApplicationDeploymentType) applicationDeploymentDescriptionType).addNewProjectAccount();
-        projectAccountType.setProjectAccountNumber("uic151");
-
-        QueueType queueType = ((HpcApplicationDeploymentType) applicationDeploymentDescriptionType).addNewQueue();
-        queueType.setQueueName("shared");
-
-        ((HpcApplicationDeploymentType) applicationDeploymentDescriptionType).setJobType(JobTypeType.SERIAL);
-        ((HpcApplicationDeploymentType) applicationDeploymentDescriptionType).setMaxWallTime(30);
-        ((HpcApplicationDeploymentType) applicationDeploymentDescriptionType).setMaxMemory(2000);
-        ((HpcApplicationDeploymentType) applicationDeploymentDescriptionType).setCpuCount(1);
-        ((HpcApplicationDeploymentType) applicationDeploymentDescriptionType).setNodeCount(1);
-        ((HpcApplicationDeploymentType) applicationDeploymentDescriptionType).setProcessorsPerNode(1);
-
-
-        /*
-        * Service
-        */
-        ServiceDescription serv = new ServiceDescription();
-        List<InputParameterType> inputParameters = new ArrayList<InputParameterType>();
-        List<OutputParameterType> outputParameters = new ArrayList<OutputParameterType>();
-
-        serv.getType().setName(serviceName);
-        serv.getType().setDescription(serviceName);
-
-        //Creating input parameters
-        InputParameterType parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("molecule_id");
-        ParameterType parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.STRING);
-        parameterType.setName("String");
-        inputParameters.add(parameter);
-
-        parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("geom_mol2");
-        parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.URI);
-        parameterType.setName("URI");
-        inputParameters.add(parameter);
-
-        parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("toppar_main_tgz");
-        parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.URI);
-        parameterType.setName("URI");
-        inputParameters.add(parameter);
-
-        parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("toppar_usr_tgz");
-        parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.URI);
-        parameterType.setName("URI");
-        inputParameters.add(parameter);
-
-        parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("toppar_mol_str");
-        parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.URI);
-        parameterType.setName("URI");
-        inputParameters.add(parameter);
-
-        parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("molecule_dir_in_tgz");
-        parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.URI);
-        parameterType.setName("URI");
-        inputParameters.add(parameter);
-
-        parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("GC_UserName");
-        parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.STRING);
-        parameterType.setName("String");
-        inputParameters.add(parameter);
-
-        parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("GC_ProjectName");
-        parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.STRING);
-        parameterType.setName("String");
-        inputParameters.add(parameter);
-
-        parameter = InputParameterType.Factory.newInstance();
-        parameter.setParameterName("GC_WorkflowName");
-        parameterType = parameter.addNewParameterType();
-        parameterType.setType(DataType.STRING);
-        parameterType.setName("String");
-        inputParameters.add(parameter);
-
-        //Creating output parameters
-        OutputParameterType outputParameter = OutputParameterType.Factory.newInstance();
-        outputParameter.setParameterName("opt_freq_input_gjf");
-        ParameterType outputParaType = outputParameter.addNewParameterType();
-        outputParaType.setType(DataType.URI);
-        outputParaType.setName("URI");
-        outputParameters.add(outputParameter);
-
-        outputParameter = OutputParameterType.Factory.newInstance();
-        outputParameter.setParameterName("charmm_miminized_crd");
-        outputParaType = outputParameter.addNewParameterType();
-        outputParaType.setType(DataType.URI);
-        outputParaType.setName("URI");
-        outputParameters.add(outputParameter);
-
-        outputParameter = OutputParameterType.Factory.newInstance();
-        outputParameter.setParameterName("step1_log");
-        outputParaType = outputParameter.addNewParameterType();
-        outputParaType.setType(DataType.URI);
-        outputParaType.setName("URI");
-        outputParameters.add(outputParameter);
-
-        outputParameter = OutputParameterType.Factory.newInstance();
-        outputParameter.setParameterName("molecule_dir_out_tgz");
-        outputParaType = outputParameter.addNewParameterType();
-        outputParaType.setType(DataType.URI);
-        outputParaType.setName("URI");
-        outputParameters.add(outputParameter);
-
-        outputParameter = OutputParameterType.Factory.newInstance();
-        outputParameter.setParameterName("gcvars");
-        outputParaType = outputParameter.addNewParameterType();
-        outputParaType.setType(DataType.URI);
-        outputParaType.setName("URI");
-        outputParameters.add(outputParameter);
-
-        //Setting input and output parameters to serviceDescriptor
-        serv.getType().setInputParametersArray(inputParameters.toArray(new InputParameterType[]{}));
-        serv.getType().setOutputParametersArray(outputParameters.toArray(new OutputParameterType[]{}));
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-        jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, context);
-        ApplicationContext applicationContext = new ApplicationContext();
-        applicationContext.setHostDescription(host);
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-
-        MessageContext inMessage = new MessageContext();
-
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType) echo_input.getType()).setValue("ai");
-        inMessage.addParameter("molecule_id", echo_input);
-
-        ActualParameter geom_mol2 = new ActualParameter(URIParameterType.type);
-        ((URIParameterType) geom_mol2.getType()).setValue("http://ccg-mw1.ncsa.uiuc.edu/cgenff/leoshen/cgenff_project/ai/ai.mol2");
-        inMessage.addParameter("geom_mol2", geom_mol2);
-
-        ActualParameter toppar_main_tgz = new ActualParameter(URIParameterType.type);
-        ((URIParameterType) toppar_main_tgz.getType()).setValue("/home/gridchem/workflow_script/toppar/cgenff/releases/2b7/main.tgz");
-        inMessage.addParameter("toppar_main_tgz", toppar_main_tgz);
-
-        ActualParameter toppar_usr_tgz = new ActualParameter(URIParameterType.type);
-        ((URIParameterType) toppar_usr_tgz.getType()).setValue("gsiftp://trestles.sdsc.edu");
-        inMessage.addParameter("toppar_usr_tgz", toppar_usr_tgz);
-
-        ActualParameter toppar_mol_str = new ActualParameter(URIParameterType.type);
-        ((URIParameterType) toppar_mol_str.getType()).setValue("http://ccg-mw1.ncsa.uiuc.edu/cgenff/leoshen/cgenff_project/ai/toppar/ai.str");
-        inMessage.addParameter("toppar_mol_str", toppar_mol_str);
-
-        ActualParameter molecule_dir_in_tgz = new ActualParameter(URIParameterType.type);
-        ((URIParameterType) molecule_dir_in_tgz.getType()).setValue("");
-        inMessage.addParameter("molecule_dir_in_tgz", molecule_dir_in_tgz);
-
-        ActualParameter GC_UserName = new ActualParameter();
-        ((StringParameterType) GC_UserName.getType()).setValue("leoshen");
-        inMessage.addParameter("GC_UserName", GC_UserName);
-
-        ActualParameter GC_ProjectName = new ActualParameter();
-        ((StringParameterType) GC_ProjectName.getType()).setValue("leoshen");
-        inMessage.addParameter("GC_ProjectName", GC_ProjectName);
-
-        ActualParameter GC_WorkflowName = new ActualParameter();
-        ((StringParameterType) GC_WorkflowName.getType()).setValue("ai__1339258840");
-        inMessage.addParameter("GC_WorkflowName", GC_WorkflowName);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-
-        ActualParameter opt_freq_input_gjf = new ActualParameter(URIParameterType.type);
-        outMessage.addParameter("opt_freq_input_gjf", opt_freq_input_gjf);
-
-        ActualParameter charmm_miminized_crd = new ActualParameter(URIParameterType.type);
-        outMessage.addParameter("charmm_miminized_crd", charmm_miminized_crd);
-
-        ActualParameter step1_log = new ActualParameter(URIParameterType.type);
-        outMessage.addParameter("step1_log", step1_log);
-
-        ActualParameter molecule_dir_out_tgz = new ActualParameter(URIParameterType.type);
-        outMessage.addParameter("molecule_dir_out_tgz", molecule_dir_out_tgz);
-
-        ActualParameter gcvars = new ActualParameter(URIParameterType.type);
-        outMessage.addParameter("gcvars", gcvars);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-    @Test
-    public void testGramProvider() throws GFacException {
-//        GFacImpl gFacAPI = new GFacImpl();
-//        gFacAPI.submitJob(jobExecutionContext);
-//        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-//        Assert.assertFalse(outMessageContext.getParameters().isEmpty());
-//        Assert.assertEquals(MappingFactory.toString((ActualParameter) outMessageContext.getParameter("echo_output")), "hello");
-    }
-}
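
ParamChemTest, like the other tests removed in this change, only works when a real grid allocation and MyProxy credentials are available, which is why it blocks a plain build. Purely as an illustrative, hypothetical sketch (not part of this commit), an alternative to deleting such tests is to skip them at runtime with JUnit's Assume whenever the myproxy.user/myproxy.password system properties are not supplied; the class and method names below are made up for the example.

    import org.junit.Assume;
    import org.junit.Before;
    import org.junit.Test;

    public class GridCredentialGuardSketch {

        @Before
        public void requireGridCredentials() {
            // Skip (rather than fail) every test in this class when MyProxy
            // credentials were not passed to the build, so a plain
            // "mvn clean install" is never blocked by grid-only tests.
            String user = System.getProperty("myproxy.user");
            String password = System.getProperty("myproxy.password");
            Assume.assumeTrue(user != null && !user.trim().isEmpty()
                    && password != null && !password.trim().isEmpty());
        }

        @Test
        public void submitJobOnlyWhenCredentialsArePresent() {
            // The GFac submission logic would go here; it runs only when the
            // assumption above holds.
        }
    }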

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTest.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTest.java
deleted file mode 100644
index 5ac1b43..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTest.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.ApplicationContext;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.context.security.SSHSecurityContext;
-import org.apache.airavata.gfac.cpi.GFacImpl;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.lang.SystemUtils;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class SSHProviderTest {
-	private JobExecutionContext jobExecutionContext;
-    @Before
-    public void setUp() throws Exception {
-
-    	URL resource = GramProviderTest.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null,null);
-//        gFacConfiguration.s
-        //have to set InFlwo Handlers and outFlowHandlers
-        ApplicationContext applicationContext = new ApplicationContext();
-        HostDescription host = new HostDescription(SSHHostType.type);
-        host.getType().setHostName("bigred");
-        host.getType().setHostAddress("bigred2.uits.iu.edu");
-        applicationContext.setHostDescription(host);
-        /*
-           * App
-           */
-        ApplicationDescription appDesc = new ApplicationDescription();
-        ApplicationDeploymentDescriptionType app = appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoSSH");
-        app.setApplicationName(name);
-
-        /*
-           * Use bat file if it is compiled on Windows
-           */
-        if (SystemUtils.IS_OS_WINDOWS) {
-            URL url = this.getClass().getClassLoader().getResource("echo.bat");
-            app.setExecutableLocation(url.getFile());
-        } else {
-            //for unix and Mac
-            app.setExecutableLocation("/bin/echo");
-        }
-
-        /*
-         * Job location
-        */
-        String tempDir = "/tmp";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = tempDir + File.separator
-                + "EchoSSH" + "_" + date + "_" + UUID.randomUUID();
-
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "input");
-        app.setOutputDataDirectory(tempDir + File.separator + "output");
-        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
-        app.setStandardError(tempDir + File.separator + "echo.stderr");
-
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-
-        /*
-           * Service
-           */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("EchoSSH");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-                .size()]);
-
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-        jobExecutionContext.setApplicationContext(applicationContext);
-
-        // Add security context
-        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext());
-        /*
-        * Host
-        */
-        applicationContext.setServiceDescription(serv);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-//		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-	private SSHSecurityContext getSecurityContext() {
-		SSHSecurityContext context = new SSHSecurityContext();
-        context.setUsername("lginnali");
-        context.setPrivateKeyLoc("~/.ssh/id_dsa");
-        context.setKeyPass("i want to be free");
-		return context;
-	}
-
-    @Test
-    public void testLocalProvider() throws GFacException {
-        GFacImpl gFacAPI = new GFacImpl();
-        gFacAPI.submitJob(jobExecutionContext);
-        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-        Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
-    }
-}
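
The setUp above derives a unique per-run working directory by stamping the service name with the current date (spaces and colons replaced with underscores) and a random UUID. A small, self-contained sketch of the same idea follows, assuming a SimpleDateFormat pattern is an acceptable stand-in for the replaceAll calls; the class name is illustrative only.

    import java.io.File;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.UUID;

    public final class WorkingDirSketch {

        // Builds a unique working directory per test run: base dir + service
        // name + timestamp + random UUID, mirroring the pattern used above.
        static String uniqueWorkingDir(String baseDir, String serviceName) {
            String stamp = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss").format(new Date());
            return baseDir + File.separator + serviceName + "_" + stamp + "_" + UUID.randomUUID();
        }

        public static void main(String[] args) {
            // e.g. /tmp/EchoSSH_2014-04-04T15-22-52_<uuid>
            System.out.println(uniqueWorkingDir("/tmp", "EchoSSH"));
        }
    }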

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/US3Test.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/US3Test.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/US3Test.java
deleted file mode 100644
index bb28edb..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/US3Test.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.util.Date;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.gfac.context.ApplicationContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.cpi.GFacImpl;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.ExtendedKeyValueType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.junit.Before;
-import org.junit.Test;
-
-public class US3Test extends AbstractBESTest{
-	
-	
-	@Before
-	public void initJobContext() throws Exception {
-		initTest();
-	}
-
-	@Test
-	public void submitJob() throws Exception {
-		JobTypeType jobType = JobTypeType.Factory.newInstance();
-		jobType.set(JobTypeType.MPI);
-		ApplicationContext appContext = getApplicationContext();
-		appContext.setApplicationDeploymentDescription(getApplicationDesc(jobType));
-		jobExecutionContext.setApplicationContext(appContext);
-		GFacImpl gFacAPI = new GFacImpl();
-		gFacAPI.submitJob(jobExecutionContext);
-	}
-	
-	
-	protected ApplicationDescription getApplicationDesc(JobTypeType jobType) {
-		ApplicationDescription appDesc = new ApplicationDescription(
-				HpcApplicationDeploymentType.type);
-		HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) appDesc
-				.getType();
-		ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory
-				.newInstance();
-		name.setStringValue("US3-Job");
-		appDepType.setApplicationName(name);
-
-//		ProjectAccountType projectAccountType = appDepType.addNewProjectAccount();
-//		projectAccountType.setProjectAccountNumber("TG-AST110064");
-
-//		QueueType queueType = appDepType.addNewQueue();
-//		queueType.setQueueName("development");
-
-		// TODO: also handle parallel jobs
-		if((jobType.enumValue() == JobTypeType.SERIAL) || (jobType.enumValue() == JobTypeType.SINGLE)) {
-			appDepType.setJobType(JobTypeType.SERIAL);
-		}
-		else if (jobType.enumValue() == JobTypeType.MPI) {
-			appDepType.setJobType(JobTypeType.MPI);
-		}
-		else {
-			appDepType.setJobType(JobTypeType.OPEN_MP);
-		}
-		
-		appDepType.setNodeCount(4);
-		appDepType.setProcessorsPerNode(8);
-		
-		appDepType.setMaxWallTime(15);
-		
-		appDepType.setExecutableLocation("us_mpi_analysis");
-		
-		ExtendedKeyValueType extKV = appDepType.addNewKeyValuePairs();
-		// using jsdl spmd standard
-		extKV.setName("NumberOfProcesses");
-		// this will be transformed into mpiexec -n 4
-		extKV.setStringValue("32"); 
-		
-		/*
-		 * Default tmp location
-		 */
-		String date = (new Date()).toString();
-		date = date.replaceAll(" ", "_");
-		date = date.replaceAll(":", "_");
-
-		String remoteTempDir = scratchDir + File.separator + "US3" + "_" + date + "_"
-				+ UUID.randomUUID();
-
-		System.out.println(remoteTempDir);
-		
-		// no need of these parameters, as unicore manages by itself
-		appDepType.setScratchWorkingDirectory(remoteTempDir);
-		appDepType.setStaticWorkingDirectory(remoteTempDir);
-		appDepType.setInputDataDirectory(remoteTempDir + File.separator + "inputData");
-		appDepType.setOutputDataDirectory(remoteTempDir + File.separator + "outputData");
-		
-		appDepType.setStandardOutput(appDepType.getOutputDataDirectory()+"/stdout");
-		
-		appDepType.setStandardError(appDepType.getOutputDataDirectory()+"/stderr");
-
-		return appDesc;
-	}
-	protected MessageContext getInMessageContext() {
-		MessageContext inMessage = new MessageContext();
-		
-	    ActualParameter a1 = new ActualParameter();
-	    a1.getType().changeType(StringParameterType.type);
-	    ((StringParameterType)a1.getType()).setValue("hpcinput-uslims3.uthscsa.edu-uslims3_cauma3-01594.tar");
-	    inMessage.addParameter("arg1", a1);
-	        
-        ActualParameter i1 = new ActualParameter();
-        i1.getType().changeType(URIParameterType.type);
-        ((URIParameterType)i1.getType()).setValue("file:///"+System.getProperty("user.home")+"/juelich-us3/hpcinput-uslims3.uthscsa.edu-uslims3_cauma3-01594.tar");
-        inMessage.addParameter("i1", i1);
-
-        return inMessage;
-	}
-
-	protected MessageContext getOutMessageContext() {
-		MessageContext outMessage = new MessageContext();
-		
-		ActualParameter a1 = new ActualParameter();
-		a1.getType().changeType(StringParameterType.type);
-		((StringParameterType)a1.getType()).setValue("output/analysis-results.tar");
-		outMessage.addParameter("o1", a1);
-
-		return outMessage;
-	}
-
-}
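
US3Test sets the JSDL SPMD key/value pair NumberOfProcesses to request a fixed process count for the MPI job. Purely as an illustration of what that value is intended to mean downstream (not how UNICORE or GFac actually assemble the command line), a hypothetical sketch of the mapping:

    import java.util.Arrays;
    import java.util.List;

    public final class MpiCommandSketch {

        // Turns a NumberOfProcesses value into the mpiexec invocation it is
        // intended to produce on the execution host.
        static List<String> mpiexecCommand(String executable, int numberOfProcesses) {
            return Arrays.asList("mpiexec", "-n", Integer.toString(numberOfProcesses), executable);
        }

        public static void main(String[] args) {
            // Prints: [mpiexec, -n, 32, us_mpi_analysis]
            System.out.println(mpiexecCommand("us_mpi_analysis", 32));
        }
    }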

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/gfac/gfac-core/src/test/java/org/apache/airavata/gfac/context/security/GSISecurityContextTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/gfac/context/security/GSISecurityContextTest.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/gfac/context/security/GSISecurityContextTest.java
deleted file mode 100644
index 54e7703..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/gfac/context/security/GSISecurityContextTest.java
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.context.security;
-
-import junit.framework.Assert;
-import org.apache.airavata.common.utils.AiravataUtils;
-import org.apache.airavata.common.utils.DatabaseTestCases;
-import org.apache.airavata.common.utils.DerbyUtil;
-import org.apache.airavata.credential.store.store.CredentialReader;
-import org.apache.airavata.credential.store.store.CredentialReaderFactory;
-import org.apache.airavata.gfac.RequestData;
-import org.apache.log4j.Logger;
-import org.ietf.jgss.GSSCredential;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 7/12/13
- * Time: 12:58 PM
- */
-
-public class GSISecurityContextTest extends DatabaseTestCases {
-
-    private static String userName;
-    private static String password;
-
-    private static final Logger log = Logger.getLogger(GSISecurityContextTest.class);
-
-    @BeforeClass
-    public static void setUpClass() throws Exception {
-        AiravataUtils.setExecutionAsServer();
-
-        userName = System.getProperty("myproxy.user");
-        password = System.getProperty("myproxy.password");
-
-        if (userName == null || password == null || userName.trim().equals("") || password.trim().equals("")) {
-            log.error("===== Please set myproxy.user and myproxy.password system properties. =======");
-            Assert.fail("Please set myproxy.user and myproxy.password system properties.");
-        }
-
-        log.info("Using my proxy user name - " + userName);
-
-        setUpDatabase();
-
-    }
-
-    public static void setUpDatabase() throws Exception {
-        DerbyUtil.startDerbyInServerMode(getHostAddress(), getPort(), getUserName(), getPassword());
-
-        waitTillServerStarts();
-
-        /*
-         * String createTable = "CREATE TABLE CREDENTIALS\n" + "(\n" + "        GATEWAY_NAME VARCHAR(256) NOT NULL,\n" +
-         * "        COMMUNITY_USER_NAME VARCHAR(256) NOT NULL,\n" + "        CREDENTIAL BLOB NOT NULL,\n" +
-         * "        PRIVATE_KEY BLOB NOT NULL,\n" + "        NOT_BEFORE VARCHAR(256) NOT NULL,\n" +
-         * "        NOT_AFTER VARCHAR(256) NOT NULL,\n" + "        LIFETIME INTEGER NOT NULL,\n" +
-         * "        REQUESTING_PORTAL_USER_NAME VARCHAR(256) NOT NULL,\n" +
-         * "        REQUESTED_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',\n" +
-         * "        PRIMARY KEY (GATEWAY_NAME, COMMUNITY_USER_NAME)\n" + ")";
-         */
-
-        String createTable = "CREATE TABLE CREDENTIALS\n" + "(\n"
-                + "        GATEWAY_ID VARCHAR(256) NOT NULL,\n"
-                + "        TOKEN_ID VARCHAR(256) NOT NULL,\n"
-                + // Actual token used to identify the credential
-                "        CREDENTIAL BLOB NOT NULL,\n" + "        PORTAL_USER_ID VARCHAR(256) NOT NULL,\n"
-                + "        TIME_PERSISTED TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n"
-                + "        PRIMARY KEY (GATEWAY_ID, TOKEN_ID)\n" + ")";
-
-        String dropTable = "drop table CREDENTIALS";
-
-        try {
-            executeSQL(dropTable);
-        } catch (Exception e) {
-        }
-
-        executeSQL(createTable);
-
-    }
-
-    @AfterClass
-    public static void shutDownDatabase() throws Exception {
-        DerbyUtil.stopDerbyServer();
-    }
-
-    @Test
-    public void testGetTrustedCertificatePath() throws Exception {
-
-        File f = new File(GSISecurityContext.getTrustedCertificatePath());
-        Assert.assertTrue(f.exists());
-    }
-
-    private GSSCredential getGSSCredentials() throws Exception {
-
-        GSISecurityContext gsiSecurityContext = getGSISecurityContext();
-
-        return gsiSecurityContext.getGssCredentials();
-    }
-
-    private GSISecurityContext getGSISecurityContext() throws Exception {
-
-        RequestData requestData = new RequestData();
-
-        requestData.setMyProxyUserName(userName);
-        requestData.setMyProxyPassword(password);
-
-        CredentialReader credentialReader = CredentialReaderFactory.createCredentialStoreReader(getDbUtil());
-
-        return new GSISecurityContext(credentialReader, requestData);
-    }
-
-    @Test
-    public void testGetGssCredentials() throws Exception {
-
-        Assert.assertNotNull(getGSSCredentials());
-    }
-
-    @Test
-    public void testRenewCredentials() throws Exception {
-        GSISecurityContext gsiSecurityContext = getGSISecurityContext();
-        Assert.assertNotNull(gsiSecurityContext.renewCredentials());
-
-    }
-
-    @Test
-    public void testGetCredentialsFromStore() throws Exception {
-        GSISecurityContext gsiSecurityContext = getGSISecurityContext();
-        Assert.assertNotNull(gsiSecurityContext.getCredentialsFromStore());
-
-    }
-
-    @Test
-    public void testGetDefaultCredentials() throws Exception {
-        GSISecurityContext gsiSecurityContext = getGSISecurityContext();
-        Assert.assertNotNull(gsiSecurityContext.getDefaultCredentials());
-
-    }
-
-    @Test
-    public void testGetProxyCredentials() throws Exception {
-        GSISecurityContext gsiSecurityContext = getGSISecurityContext();
-        Assert.assertNotNull(gsiSecurityContext.getProxyCredentials());
-
-    }
-
-    @Test
-    public void testRenewCredentialsAsATrustedHost() throws Exception {
-        GSISecurityContext gsiSecurityContext = getGSISecurityContext();
-        Assert.assertNotNull(gsiSecurityContext.renewCredentialsAsATrustedHost());
-    }
-}
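
setUpDatabase() above boots a Derby network server through DerbyUtil and (re)creates the CREDENTIALS table before the credential-store tests run. A stand-alone sketch of the same DDL over plain JDBC, with hypothetical connection settings (URL, user, password) standing in for what DatabaseTestCases normally supplies:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.sql.Statement;

    public final class CredentialsTableSketch {

        public static void main(String[] args) throws SQLException {
            // Hypothetical Derby network-server URL and credentials.
            String url = "jdbc:derby://localhost:1527/credential-store;create=true";
            try (Connection conn = DriverManager.getConnection(url, "airavata", "airavata");
                 Statement stmt = conn.createStatement()) {
                try {
                    stmt.execute("DROP TABLE CREDENTIALS");
                } catch (SQLException ignored) {
                    // Table does not exist yet on a fresh database.
                }
                stmt.execute("CREATE TABLE CREDENTIALS ("
                        + " GATEWAY_ID VARCHAR(256) NOT NULL,"
                        + " TOKEN_ID VARCHAR(256) NOT NULL,"
                        + " CREDENTIAL BLOB NOT NULL,"
                        + " PORTAL_USER_ID VARCHAR(256) NOT NULL,"
                        + " TIME_PERSISTED TIMESTAMP DEFAULT CURRENT_TIMESTAMP,"
                        + " PRIMARY KEY (GATEWAY_ID, TOKEN_ID))");
            }
        }
    }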

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/gfac/gfac-core/src/test/java/org/apache/airavata/gfac/provider/impl/GramProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/gfac/provider/impl/GramProviderTest.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/gfac/provider/impl/GramProviderTest.java
deleted file mode 100644
index 0a0bd8e..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/gfac/provider/impl/GramProviderTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.impl;
-
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 7/12/13
- * Time: 11:17 AM
- */
-
-public class GramProviderTest {
-    @BeforeMethod
-    public void setUp() throws Exception {
-
-    }
-
-    @AfterMethod
-    public void tearDown() throws Exception {
-
-    }
-
-    @Test
-    public void testExecute() throws Exception {
-
-        GramProvider gramProvider = new GramProvider();
-
-        System.out.println(gramProvider.getGramErrorString(1));
-
-        System.out.println("======================================================================================");
-        System.out.println("Executing test .......................................................................");
-
-    }
-
-    @Test
-    public void testCancelJob() throws Exception {
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/gfac/gfac-ec2/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/pom.xml b/modules/gfac/gfac-ec2/pom.xml
index a5a69dc..11ce4ca 100644
--- a/modules/gfac/gfac-ec2/pom.xml
+++ b/modules/gfac/gfac-ec2/pom.xml
@@ -127,23 +127,4 @@
         </dependency>
 
     </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <configuration>
-                    <!--skip>true</skip -->
-                    <excludes>
-                        <exclude>**/services/**</exclude>
-                        <exclude>**/gfac/**</exclude>
-                    </excludes>
-                    <forkMode>always</forkMode>
-                    <failIfNoTests>false</failIfNoTests>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-
 </project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/gfac/gfac-monitor/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/pom.xml b/modules/gfac/gfac-monitor/pom.xml
index f09be35..3fe60fc 100644
--- a/modules/gfac/gfac-monitor/pom.xml
+++ b/modules/gfac/gfac-monitor/pom.xml
@@ -14,7 +14,7 @@
         <groupId>org.apache.airavata</groupId>
         <artifactId>airavata</artifactId>
         <version>0.12-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
+        <relativePath>../../../pom.xml</relativePath>
     </parent>
 
     <modelVersion>4.0.0</modelVersion>
@@ -77,19 +77,4 @@
             <scope>test</scope>
         </dependency>
     </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <configuration>
-                    <skip>false</skip>
-                    <forkMode>always</forkMode>
-                    <failIfNoTests>false</failIfNoTests>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-
 </project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/integration-tests/pom.xml
----------------------------------------------------------------------
diff --git a/modules/integration-tests/pom.xml b/modules/integration-tests/pom.xml
index 90e64a7..63680dd 100644
--- a/modules/integration-tests/pom.xml
+++ b/modules/integration-tests/pom.xml
@@ -203,61 +203,6 @@
                     </execution>
                 </executions>
             </plugin>
-
-
-
-            <!--plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <version>2.16</version>
-            </plugin -->
-            <!--plugin>
-                <groupId>org.codehaus.cargo</groupId>
-                <artifactId>cargo-maven2-plugin</artifactId>
-                <version>1.3.1</version>
-                <configuration>
-
-                    <container>
-                        <containerId>tomcat${major.tomcat.version}x</containerId>
-                        <type>installed</type>
-                        <home>${tomcat.work.dir}</home>
-                    </container>
-
-                    <configuration>
-                        <properties>
-                            <cargo.servlet.port>${test.running.port}</cargo.servlet.port>
-                            <cargo.tomcat.ajp.port>9009</cargo.tomcat.ajp.port>
-                            <cargo.rmi.port>9099</cargo.rmi.port>
-                            <cargo.jvmargs>
-                                -Xms512m
-                                -Xmx512m
-                                -XX:MaxPermSize=256m
-                            </cargo.jvmargs>
-                            <cargo.tomcat.context.reloadable>true</cargo.tomcat.context.reloadable>
-                        </properties>
-                        <type>existing</type>
-                        <home>${tomcat.work.dir}</home>
-
-                    </configuration>
-
-                </configuration>
-                <executions>
-                    <execution>
-                        <id>start-container</id>
-                        <phase>compile</phase>
-                        <goals>
-                            <goal>start</goal>
-                        </goals>
-                    </execution>
-                    <execution>
-                        <id>stop-container</id>
-                        <phase>post-integration-test</phase>
-                        <goals>
-                            <goal>stop</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin-->
         </plugins>
     </build>
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/OrchestratorClientFactoryTest.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/OrchestratorClientFactoryTest.java b/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/OrchestratorClientFactoryTest.java
index 36bb0f6..de5113a 100644
--- a/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/OrchestratorClientFactoryTest.java
+++ b/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/OrchestratorClientFactoryTest.java
@@ -33,7 +33,9 @@ import org.apache.airavata.model.workspace.experiment.ComputationalResourceSched
 import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.UserConfigurationData;
+import org.apache.airavata.orchestrator.client.util.Initialize;
 import org.apache.airavata.orchestrator.cpi.OrchestratorService;
+import org.apache.airavata.orchestrator.server.OrchestratorServer;
 import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.ParentDataType;
 import org.apache.airavata.registry.cpi.Registry;
@@ -50,16 +52,20 @@ public class OrchestratorClientFactoryTest {
     private OrchestratorService.Client orchestratorClient;
     private Registry registry;
     private int NUM_CONCURRENT_REQUESTS = 1;
+    Initialize initialize;
     @Before
     public void setUp() {
-        orchestratorClient = OrchestratorClientFactory.createOrchestratorClient("localhost", 8940);
+        initialize = new Initialize("registry-derby.sql");
+        initialize.initializeDB();
+        try {
+            new OrchestratorServer().start();
+        } catch (Exception e) {
+            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+        }
         registry = RegistryFactory.getDefaultRegistry();
         AiravataUtils.setExecutionAsServer();
         documentCreator = new DocumentCreator(getAiravataAPI());
         documentCreator.createLocalHostDocs();
-        documentCreator.createGramDocs();
-        documentCreator.createPBSDocsForOGCE();
-        documentCreator.createSlurmDocs();
     }
 
     private AiravataAPI getAiravataAPI() {
@@ -82,6 +88,8 @@ public class OrchestratorClientFactoryTest {
 
     @Test
     public void storeExperimentDetail() {
+        orchestratorClient = OrchestratorClientFactory.createOrchestratorClient("localhost", 8940);
+
             for (int i = 0; i < NUM_CONCURRENT_REQUESTS; i++) {
                 Thread thread = new Thread() {
                     public void run() {
@@ -101,11 +109,11 @@ public class OrchestratorClientFactoryTest {
                         exOut.add(output);
 
                         Experiment simpleExperiment = ExperimentModelUtil.createSimpleExperiment("project1",
-                                "admin", "echoExperiment", "SimpleEcho2", "SimpleEcho2", exInputs);
+                                "admin", "echoExperiment", "SimpleEcho0", "SimpleEcho0", exInputs);
                         simpleExperiment.setExperimentOutputs(exOut);
 
-                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling("stampede.tacc.xsede.org", 1, 1, 1, "normal", 0, 0, 1, "TG-STA110014S");
-                        scheduling.setResourceHostId("stampede-host");
+                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling("localhost", 1, 1, 1, "normal", 0, 0, 1, "TG-STA110014S");
+                        scheduling.setResourceHostId("localhost");
                         UserConfigurationData userConfigurationData = new UserConfigurationData();
                         userConfigurationData.setComputationalResourceScheduling(scheduling);
                         simpleExperiment.setUserConfigurationData(userConfigurationData);
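
The revised setUp in OrchestratorClientFactoryTest now provisions its own environment instead of assuming an already-running orchestrator: it loads the Derby registry from registry-derby.sql via the new Initialize helper, starts an OrchestratorServer in-process, and only then, inside the test itself, creates the OrchestratorService.Client on port 8940. A condensed sketch of that ordering, reusing the classes the diff references; the class name below is illustrative only.

    package org.apache.airavata.orchestrator.client;

    import org.apache.airavata.orchestrator.client.util.Initialize;
    import org.apache.airavata.orchestrator.cpi.OrchestratorService;
    import org.apache.airavata.orchestrator.server.OrchestratorServer;
    import org.junit.Before;
    import org.junit.Test;

    public class OrchestratorSetupSketch {

        private Initialize initialize;

        @Before
        public void setUp() throws Exception {
            // 1. Create and populate the embedded Derby registry.
            initialize = new Initialize("registry-derby.sql");
            initialize.initializeDB();
            // 2. Start the orchestrator server in-process so the client has
            //    something to talk to.
            new OrchestratorServer().start();
        }

        @Test
        public void clientConnectsAfterServerIsUp() {
            // 3. The client is now created inside the test, after the server
            //    is up, rather than in setUp.
            OrchestratorService.Client client =
                    OrchestratorClientFactory.createOrchestratorClient("localhost", 8940);
            // ... submit experiments through 'client' here ...
        }
    }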

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/util/Initialize.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/util/Initialize.java b/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/util/Initialize.java
new file mode 100644
index 0000000..2f1f368
--- /dev/null
+++ b/modules/orchestrator/airavata-orchestrator-service/src/test/java/org/apache/airavata/orchestrator/client/util/Initialize.java
@@ -0,0 +1,332 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.orchestrator.client.util;
+
+import org.apache.airavata.persistance.registry.jpa.ResourceType;
+import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+import org.apache.airavata.persistance.registry.jpa.resources.*;
+import org.apache.airavata.registry.api.exception.RegistrySettingsException;
+import org.apache.airavata.registry.api.util.RegistrySettings;
+import org.apache.derby.drda.NetworkServerControl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.InetAddress;
+import java.sql.*;
+import java.util.StringTokenizer;
+
+public class Initialize {
+    private static final Logger logger = LoggerFactory.getLogger(Initialize.class);
+    public static final String DERBY_SERVER_MODE_SYS_PROPERTY = "derby.drda.startNetworkServer";
+    public  String scriptName = "registry-derby.sql";
+    private NetworkServerControl server;
+    private static final String delimiter = ";";
+    public static final String PERSISTANT_DATA = "Configuration";
+
+    public Initialize(String scriptName) {
+        this.scriptName = scriptName;
+    }
+
+    public static boolean checkStringBufferEndsWith(StringBuffer buffer, String suffix) {
+        if (suffix.length() > buffer.length()) {
+            return false;
+        }
+        // this loop is done on purpose to avoid memory allocation performance
+        // problems on various JDKs
+        // StringBuffer.lastIndexOf() was introduced in jdk 1.4 and
+        // implementation is ok though does allocation/copying
+        // StringBuffer.toString().endsWith() does massive memory
+        // allocation/copying on JDK 1.5
+        // See http://issues.apache.org/bugzilla/show_bug.cgi?id=37169
+        int endIndex = suffix.length() - 1;
+        int bufferIndex = buffer.length() - 1;
+        while (endIndex >= 0) {
+            if (buffer.charAt(bufferIndex) != suffix.charAt(endIndex)) {
+                return false;
+            }
+            bufferIndex--;
+            endIndex--;
+        }
+        return true;
+    }
+
+    private static boolean isServerStarted(NetworkServerControl server, int ntries)
+    {
+        for (int i = 1; i <= ntries; i ++)
+        {
+            try {
+                Thread.sleep(500);
+                server.ping();
+                return true;
+            }
+            catch (Exception e) {
+                if (i == ntries)
+                    return false;
+            }
+        }
+        return false;
+    }
+
+    public void initializeDB() {
+        String jdbcUrl = null;
+        String jdbcDriver = null;
+        String jdbcUser = null;
+        String jdbcPassword = null;
+        try{
+            jdbcDriver = RegistrySettings.getSetting("registry.jdbc.driver");
+            jdbcUrl = RegistrySettings.getSetting("registry.jdbc.url");
+            jdbcUser = RegistrySettings.getSetting("registry.jdbc.user");
+            jdbcPassword = RegistrySettings.getSetting("registry.jdbc.password");
+            jdbcUrl = jdbcUrl + "?" + "user=" + jdbcUser + "&" + "password=" + jdbcPassword;
+        } catch (RegistrySettingsException e) {
+            logger.error("Unable to read properties" , e);
+        }
+
+
+        startDerbyInServerMode();
+        if(!isServerStarted(server, 20)){
+           throw new RuntimeException("Derby server cound not started within five seconds...");
+        }
+//      startDerbyInEmbeddedMode();
+
+        Connection conn = null;
+        try {
+            Class.forName(Utils.getJDBCDriver()).newInstance();
+            conn = DriverManager.getConnection(jdbcUrl, jdbcUser, jdbcPassword);
+            if (!isDatabaseStructureCreated(PERSISTANT_DATA, conn)) {
+                executeSQLScript(conn);
+                logger.info("New Database created for Registry");
+            } else {
+                logger.debug("Database already created for Registry!");
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException("Database failure", e);
+        } finally {
+            try {
+                if (conn != null){
+                    if (!conn.getAutoCommit()) {
+                        conn.commit();
+                    }
+                    conn.close();
+                }
+            } catch (SQLException e) {
+                logger.error(e.getMessage(), e);
+                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+            }
+        }
+
+        try{
+            GatewayResource gatewayResource = (GatewayResource)ResourceUtils.createGateway(RegistrySettings.getSetting("default.registry.gateway"));
+            gatewayResource.setOwner(RegistrySettings.getSetting("default.registry.gateway"));
+            gatewayResource.save();
+
+            UserResource userResource = ResourceUtils.createUser(RegistrySettings.getSetting("default.registry.user"),RegistrySettings.getSetting("default.registry.password"));
+            userResource.save();
+
+            WorkerResource workerResource = (WorkerResource) gatewayResource.create(ResourceType.GATEWAY_WORKER);
+            workerResource.setUser(userResource.getUserName());
+            workerResource.save();
+
+            ProjectResource resource = (ProjectResource)gatewayResource.create(ResourceType.PROJECT);
+            resource.setName("default");
+            resource.setWorker(workerResource);
+            resource.save();
+
+        } catch (RegistrySettingsException e) {
+            logger.error("Unable to read properties", e);
+        }
+    }
+
+    public static boolean isDatabaseStructureCreated(String tableName, Connection conn) {
+        try {
+            // Check whether the tables have already been created by running a simple query.
+            logger.debug("Running a query to check whether the database tables already exist.");
+            Statement statement = null;
+            try {
+                statement = conn.createStatement();
+                ResultSet rs = statement.executeQuery("select * from " + tableName);
+                if (rs != null) {
+                    rs.close();
+                }
+            } finally {
+                try {
+                    if (statement != null) {
+                        statement.close();
+                    }
+                } catch (SQLException e) {
+                    return false;
+                }
+            }
+        } catch (SQLException e) {
+            return false;
+        }
+
+        return true;
+    }
+
+    private void executeSQLScript(Connection conn) throws Exception {
+        StringBuffer sql = new StringBuffer();
+        BufferedReader reader = null;
+        try {
+            InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream(scriptName);
+            reader = new BufferedReader(new InputStreamReader(inputStream));
+            String line;
+            while ((line = reader.readLine()) != null) {
+                line = line.trim();
+                // Skip comment lines ("//", "--" and Oracle-style "REM").
+                if (line.startsWith("//") || line.startsWith("--")) {
+                    continue;
+                }
+                StringTokenizer st = new StringTokenizer(line);
+                if (st.hasMoreTokens()) {
+                    String token = st.nextToken();
+                    if ("REM".equalsIgnoreCase(token)) {
+                        continue;
+                    }
+                }
+                sql.append(" ").append(line);
+
+                // SQL defines "--" as a comment to end-of-line, and in Oracle it may carry a hint,
+                // so we cannot simply strip it; instead we terminate the line with a newline.
+                if (line.indexOf("--") >= 0) {
+                    sql.append("\n");
+                }
+                if (checkStringBufferEndsWith(sql, delimiter)) {
+                    executeSQL(sql.substring(0, sql.length() - delimiter.length()), conn);
+                    sql.replace(0, sql.length(), "");
+                }
+            }
+            // Catch any statement that is not terminated by the delimiter.
+            if (sql.length() > 0) {
+                executeSQL(sql.toString(), conn);
+            }
+        } catch (IOException e) {
+            logger.error("Error occurred while executing the SQL script for creating the Airavata database", e);
+            throw new Exception("Error occurred while executing the SQL script for creating the Airavata database", e);
+        } finally {
+            if (reader != null) {
+                reader.close();
+            }
+        }
+    }
+
+    private static void executeSQL(String sql, Connection conn) throws Exception {
+        // Check and ignore empty statements
+        if ("".equals(sql.trim())) {
+            return;
+        }
+
+        Statement statement = null;
+        try {
+            logger.debug("SQL : " + sql);
+
+            boolean ret;
+            int updateCount = 0, updateCountTotal = 0;
+            statement = conn.createStatement();
+            ret = statement.execute(sql);
+            updateCount = statement.getUpdateCount();
+            do {
+                if (!ret) {
+                    if (updateCount != -1) {
+                        updateCountTotal += updateCount;
+                    }
+                }
+                ret = statement.getMoreResults();
+                if (ret) {
+                    updateCount = statement.getUpdateCount();
+                }
+            } while (ret);
+
+            logger.debug(sql + " : " + updateCountTotal + " rows affected");
+
+            SQLWarning warning = conn.getWarnings();
+            while (warning != null) {
+                logger.warn("SQL warning: " + warning);
+                warning = warning.getNextWarning();
+            }
+            conn.clearWarnings();
+        } catch (SQLException e) {
+            if ("X0Y32".equals(e.getSQLState())) {
+                // Ignore Derby's "table already exists" error (SQLState X0Y32).
+                logger.info("Table already exists", e);
+            } else {
+                throw new Exception("Error occurred while executing : " + sql, e);
+            }
+        } finally {
+            if (statement != null) {
+                try {
+                    statement.close();
+                } catch (SQLException e) {
+                    logger.error("Error occurred while closing the statement.", e);
+                }
+            }
+        }
+    }
+
+    private void startDerbyInServerMode() {
+        try {
+            System.setProperty(DERBY_SERVER_MODE_SYS_PROPERTY, "true");
+            server = new NetworkServerControl(InetAddress.getByName(Utils.getHost()),
+                    20000,
+                    Utils.getJDBCUser(), Utils.getJDBCPassword());
+            java.io.PrintWriter consoleWriter = new java.io.PrintWriter(System.out, true);
+            server.start(consoleWriter);
+        } catch (Exception e) {
+            logger.error("Unable to start Apache Derby in server mode! Check whether the " +
+                    "specified port is available.", e);
+        }
+
+    }
+
+    private void startDerbyInEmbeddedMode() {
+        try {
+            Class.forName("org.apache.derby.jdbc.EmbeddedDriver");
+            DriverManager.getConnection("jdbc:derby:memory:unit-testing-jpa;create=true").close();
+        } catch (ClassNotFoundException e) {
+            logger.error("Unable to load the Derby embedded JDBC driver.", e);
+        } catch (SQLException e) {
+            logger.error("Unable to start Derby in embedded mode.", e);
+        }
+    }
+
+    public void stopDerbyServer() {
+        try {
+            server.shutdown();
+        } catch (Exception e) {
+            logger.error("Error occurred while shutting down the Derby server.", e);
+        }
+    }
+}
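
For readers tracing the bootstrap flow above (start Derby in network-server mode, wait until it answers a ping, then connect and create the registry schema if needed), here is a minimal self-contained sketch of the same pattern. It is an illustration only: the host, port, credentials and database name are placeholders, not values read from Airavata's configuration.

    import java.io.PrintWriter;
    import java.net.InetAddress;
    import java.sql.Connection;
    import java.sql.DriverManager;

    import org.apache.derby.drda.NetworkServerControl;

    public class DerbyBootstrapSketch {

        public static void main(String[] args) throws Exception {
            // Placeholder settings; the real values come from the registry.jdbc.* properties.
            String host = "localhost";
            int port = 20000;
            String user = "airavata";
            String password = "airavata";

            // Start Derby in network-server mode, as startDerbyInServerMode() does.
            NetworkServerControl server =
                    new NetworkServerControl(InetAddress.getByName(host), port, user, password);
            server.start(new PrintWriter(System.out, true));

            // Wait for the server to come up, mirroring isServerStarted(): ping up to
            // 20 times, sleeping 500 ms between attempts (at most ~10 seconds).
            boolean up = false;
            for (int i = 0; i < 20 && !up; i++) {
                try {
                    Thread.sleep(500);
                    server.ping();
                    up = true;
                } catch (Exception e) {
                    // not up yet; retry
                }
            }
            if (!up) {
                throw new IllegalStateException("Derby server did not start within ten seconds");
            }

            // Connect through the Derby client driver; initializeDB() would now run the
            // registry-derby.sql script if the tables do not exist yet.
            Class.forName("org.apache.derby.jdbc.ClientDriver");
            Connection conn = DriverManager.getConnection(
                    "jdbc:derby://" + host + ":" + port + "/registry;create=true", user, password);
            try {
                System.out.println("Connected to " + conn.getMetaData().getURL());
            } finally {
                conn.close();
            }

            server.shutdown();
        }
    }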

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/orchestrator/airavata-orchestrator-service/src/test/resources/airavata-server.properties
----------------------------------------------------------------------
diff --git a/modules/orchestrator/airavata-orchestrator-service/src/test/resources/airavata-server.properties b/modules/orchestrator/airavata-orchestrator-service/src/test/resources/airavata-server.properties
index 0526808..e395a76 100644
--- a/modules/orchestrator/airavata-orchestrator-service/src/test/resources/airavata-server.properties
+++ b/modules/orchestrator/airavata-orchestrator-service/src/test/resources/airavata-server.properties
@@ -103,8 +103,7 @@ myproxy.user=ogce
 myproxy.pass=
 myproxy.life=3600
 # XSEDE Trusted certificates can be downloaded from https://software.xsede.org/security/xsede-certs.tar.gz
-trusted.cert.location=/Users/chathuri/dev/airavata/cert/certificates
-
+trusted.cert.location=/Users/lahirugunathilake/Downloads/certificates
 # SSH PKI key pair or ssh password can be used SSH based authentication is used.
 # if user specify both password authentication gets the higher preference
 
@@ -252,3 +251,33 @@ registry.service.wsdl=http://localhost:${port}/${server.context-root}/services/R
 
 # If false, disables two phase commit when submitting jobs
 TwoPhase=true
+
+
+###---------------------------Monitoring module Configurations---------------------------###
+# This is the primary monitoring tool that runs inside Airavata. In the future there will be multiple
+# monitoring mechanisms, and it will be possible to start more than one monitor.
+monitors=org.apache.airavata.job.monitor.impl.pull.qstat.QstatMonitor,org.apache.airavata.job.monitor.impl.LocalJobMonitor
+#,org.apache.airavata.job.monitor.impl.push.amqp.AMQPMonitor
+# AMQP-related configuration listing the RabbitMQ hosts; this is an XSEDE-specific configuration.
+amqp.hosts=info1.dyn.teragrid.org,info2.dyn.teragrid.org
+proxy.file.path=/Users/lahirugunathilake/Downloads/x509up_u503876
+connection.name=xsede_private
+
+
+###---------------------------Orchestrator module Configurations---------------------------###
+job.submitter=org.apache.airavata.orchestrator.core.impl.EmbeddedGFACJobSubmitter
+job.validator=org.apache.airavata.orchestrator.core.validator.impl.SimpleAppDataValidator
+submitter.interval=10000
+threadpool.size=10
+start.submitter=true
+embedded.mode=true
+enable.validation=false
+orchestrator=org.apache.airavata.orchestrator.server.OrchestratorServer
+
+###---------------------------API Server module Configurations---------------------------###
+apiserver=org.apache.airavata.api.server.AiravataAPIServer
+
+###---------------------------Airavata Server Configurations---------------------------###
+servers=apiserver,orchestrator
+#shutdown.trategy=NONE
+shutdown.trategy=SELF_TERMINATE
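
Note on the monitors property above: it is a comma-separated list of monitor implementation classes. Purely as an illustration (this is not Airavata's actual loading code), such a list could be consumed via reflection roughly as in the sketch below; the class name MonitorLoaderSketch and the assumption of a public no-argument constructor are hypothetical.

    import java.util.ArrayList;
    import java.util.List;

    public class MonitorLoaderSketch {

        // Turns a comma-separated list of class names (e.g. the "monitors" property above)
        // into instances. Assumes each class has a public no-argument constructor.
        public static List<Object> loadMonitors(String monitorsProperty) {
            List<Object> monitors = new ArrayList<Object>();
            if (monitorsProperty == null || monitorsProperty.trim().isEmpty()) {
                return monitors;
            }
            for (String className : monitorsProperty.split(",")) {
                className = className.trim();
                if (className.isEmpty()) {
                    continue; // skip blank entries
                }
                try {
                    monitors.add(Class.forName(className).newInstance());
                } catch (Exception e) {
                    throw new RuntimeException("Unable to instantiate monitor: " + className, e);
                }
            }
            return monitors;
        }
    }

Fed the raw property value, loadMonitors(...) would return instances of QstatMonitor and LocalJobMonitor from the list above, provided those classes are on the classpath.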

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/orchestrator/airavata-orchestrator-service/src/test/resources/registry-derby.sql
----------------------------------------------------------------------
diff --git a/modules/orchestrator/airavata-orchestrator-service/src/test/resources/registry-derby.sql b/modules/orchestrator/airavata-orchestrator-service/src/test/resources/registry-derby.sql
new file mode 100644
index 0000000..e1bf9e6
--- /dev/null
+++ b/modules/orchestrator/airavata-orchestrator-service/src/test/resources/registry-derby.sql
@@ -0,0 +1,391 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+CREATE TABLE GATEWAY
+(
+        GATEWAY_NAME VARCHAR(255),
+        OWNER VARCHAR(255),
+        PRIMARY KEY (GATEWAY_NAME)
+);
+
+CREATE TABLE CONFIGURATION
+(
+        CONFIG_KEY VARCHAR(255),
+        CONFIG_VAL VARCHAR(255),
+        EXPIRE_DATE TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+        CATEGORY_ID VARCHAR (255),
+        PRIMARY KEY(CONFIG_KEY, CONFIG_VAL, CATEGORY_ID)
+);
+
+INSERT INTO CONFIGURATION (CONFIG_KEY, CONFIG_VAL, EXPIRE_DATE, CATEGORY_ID) VALUES('registry.version', '0.12', CURRENT_TIMESTAMP ,'SYSTEM');
+
+CREATE TABLE USERS
+(
+        USER_NAME VARCHAR(255),
+        PASSWORD VARCHAR(255),
+        PRIMARY KEY(USER_NAME)
+);
+
+CREATE TABLE GATEWAY_WORKER
+(
+        GATEWAY_NAME VARCHAR(255),
+        USER_NAME VARCHAR(255),
+        PRIMARY KEY (GATEWAY_NAME, USER_NAME),
+        FOREIGN KEY (GATEWAY_NAME) REFERENCES GATEWAY(GATEWAY_NAME) ON DELETE CASCADE,
+        FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE PROJECT
+(
+         GATEWAY_NAME VARCHAR(255),
+         USER_NAME VARCHAR(255),
+         PROJECT_NAME VARCHAR(255),
+         DESCRIPTION VARCHAR(255),
+         CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+         PRIMARY KEY (PROJECT_NAME),
+         FOREIGN KEY (GATEWAY_NAME) REFERENCES GATEWAY(GATEWAY_NAME) ON DELETE CASCADE,
+         FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE PROJECT_USER
+(
+    PROJECT_NAME VARCHAR(255),
+    USER_NAME VARCHAR(255),
+    PRIMARY KEY (PROJECT_NAME,USER_NAME),
+    FOREIGN KEY (PROJECT_NAME) REFERENCES PROJECT(PROJECT_NAME) ON DELETE CASCADE,
+    FOREIGN KEY (USER_NAME) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE PUBLISHED_WORKFLOW
+(
+         GATEWAY_NAME VARCHAR(255),
+         CREATED_USER VARCHAR(255),
+         PUBLISH_WORKFLOW_NAME VARCHAR(255),
+         VERSION VARCHAR(255),
+         PUBLISHED_DATE TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+         PATH VARCHAR (255),
+         WORKFLOW_CONTENT BLOB,
+         PRIMARY KEY(GATEWAY_NAME, PUBLISH_WORKFLOW_NAME),
+         FOREIGN KEY (GATEWAY_NAME) REFERENCES GATEWAY(GATEWAY_NAME) ON DELETE CASCADE,
+         FOREIGN KEY (CREATED_USER) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE USER_WORKFLOW
+(
+         GATEWAY_NAME VARCHAR(255),
+         OWNER VARCHAR(255),
+         TEMPLATE_NAME VARCHAR(255),
+         LAST_UPDATED_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+         PATH VARCHAR (255),
+         WORKFLOW_GRAPH BLOB,
+         PRIMARY KEY(GATEWAY_NAME, OWNER, TEMPLATE_NAME),
+         FOREIGN KEY (GATEWAY_NAME) REFERENCES GATEWAY(GATEWAY_NAME) ON DELETE CASCADE,
+         FOREIGN KEY (OWNER) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE HOST_DESCRIPTOR
+(
+         GATEWAY_NAME VARCHAR(255),
+         UPDATED_USER VARCHAR(255),
+         HOST_DESCRIPTOR_ID VARCHAR(255),
+         HOST_DESCRIPTOR_XML BLOB,
+         PRIMARY KEY(GATEWAY_NAME, HOST_DESCRIPTOR_ID),
+         FOREIGN KEY (GATEWAY_NAME) REFERENCES GATEWAY(GATEWAY_NAME) ON DELETE CASCADE,
+         FOREIGN KEY (UPDATED_USER) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE SERVICE_DESCRIPTOR
+(
+         GATEWAY_NAME VARCHAR(255),
+         UPDATED_USER VARCHAR(255),
+         SERVICE_DESCRIPTOR_ID VARCHAR(255),
+         SERVICE_DESCRIPTOR_XML BLOB,
+         PRIMARY KEY(GATEWAY_NAME,SERVICE_DESCRIPTOR_ID),
+         FOREIGN KEY (GATEWAY_NAME) REFERENCES GATEWAY(GATEWAY_NAME) ON DELETE CASCADE,
+         FOREIGN KEY (UPDATED_USER) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE APPLICATION_DESCRIPTOR
+(
+         GATEWAY_NAME VARCHAR(255),
+         UPDATED_USER VARCHAR(255),
+         APPLICATION_DESCRIPTOR_ID VARCHAR(255),
+         HOST_DESCRIPTOR_ID VARCHAR(255),
+         SERVICE_DESCRIPTOR_ID VARCHAR(255),
+         APPLICATION_DESCRIPTOR_XML BLOB,
+         PRIMARY KEY(GATEWAY_NAME,APPLICATION_DESCRIPTOR_ID),
+         FOREIGN KEY (GATEWAY_NAME) REFERENCES GATEWAY(GATEWAY_NAME) ON DELETE CASCADE,
+         FOREIGN KEY (UPDATED_USER) REFERENCES USERS(USER_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE EXPERIMENT
+(
+        EXPERIMENT_ID VARCHAR(255),
+        GATEWAY_NAME VARCHAR(255),
+        EXECUTION_USER VARCHAR(255),
+        PROJECT_NAME VARCHAR(255),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        EXPERIMENT_NAME VARCHAR(255) NOT NULL,
+        EXPERIMENT_DESCRIPTION VARCHAR(255),
+        APPLICATION_ID VARCHAR(255),
+        APPLICATION_VERSION VARCHAR(255),
+        WORKFLOW_TEMPLATE_ID VARCHAR(255),
+        WORKFLOW_TEMPLATE_VERSION VARCHAR(255),
+        WORKFLOW_EXECUTION_ID VARCHAR(255),
+        PRIMARY KEY(EXPERIMENT_ID),
+        FOREIGN KEY (GATEWAY_NAME) REFERENCES GATEWAY(GATEWAY_NAME) ON DELETE CASCADE,
+        FOREIGN KEY (PROJECT_NAME) REFERENCES PROJECT(PROJECT_NAME) ON DELETE CASCADE
+);
+
+CREATE TABLE EXPERIMENT_INPUT
+(
+        EXPERIMENT_ID VARCHAR(255),
+        INPUT_KEY VARCHAR(255),
+        INPUT_TYPE VARCHAR(255),
+        METADATA VARCHAR(255),
+        VALUE VARCHAR(255),
+        PRIMARY KEY(EXPERIMENT_ID,INPUT_KEY),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE EXPERIMENT_OUTPUT
+(
+        EXPERIMENT_ID VARCHAR(255),
+        OUTPUT_KEY VARCHAR(255),
+        OUTPUT_KEY_TYPE VARCHAR(255),
+        METADATA VARCHAR(255),
+        VALUE VARCHAR(255),
+        PRIMARY KEY(EXPERIMENT_ID,OUTPUT_KEY),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+
+CREATE TABLE WORKFLOW_NODE_DETAIL
+(
+        EXPERIMENT_ID VARCHAR(255),
+        NODE_INSTANCE_ID VARCHAR(255),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        NODE_NAME VARCHAR(255),
+        PRIMARY KEY(NODE_INSTANCE_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE TASK_DETAIL
+(
+        TASK_ID VARCHAR(255),
+        NODE_INSTANCE_ID VARCHAR(255),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        APPLICATION_ID VARCHAR(255),
+        APPLICATION_VERSION VARCHAR(255),
+        PRIMARY KEY(TASK_ID),
+        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE ERROR_DETAIL
+(
+         ERROR_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+         EXPERIMENT_ID VARCHAR(255),
+         TASK_ID VARCHAR(255),
+         NODE_INSTANCE_ID VARCHAR(255),
+         JOB_ID VARCHAR(255),
+         CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+         ACTUAL_ERROR_MESSAGE CLOB,
+         USER_FRIEDNLY_ERROR_MSG VARCHAR(255),
+         TRANSIENT_OR_PERSISTENT SMALLINT,
+         ERROR_CATEGORY VARCHAR(255),
+         CORRECTIVE_ACTION VARCHAR(255),
+         ACTIONABLE_GROUP VARCHAR(255),
+         PRIMARY KEY(ERROR_ID),
+         FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+         FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE,
+         FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE APPLICATION_INPUT
+(
+        TASK_ID VARCHAR(255),
+        INPUT_KEY VARCHAR(255),
+        INPUT_KEY_TYPE VARCHAR(255),
+        METADATA VARCHAR(255),
+        VALUE VARCHAR(255),
+        PRIMARY KEY(TASK_ID,INPUT_KEY),
+        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE APPLICATION_OUTPUT
+(
+        TASK_ID VARCHAR(255),
+        OUTPUT_KEY VARCHAR(255),
+        OUTPUT_KEY_TYPE VARCHAR(255),
+        METADATA VARCHAR(255),
+        VALUE VARCHAR(255),
+        PRIMARY KEY(TASK_ID,OUTPUT_KEY),
+        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE NODE_INPUT
+(
+       NODE_INSTANCE_ID VARCHAR(255),
+       INPUT_KEY VARCHAR(255),
+       INPUT_KEY_TYPE VARCHAR(255),
+       METADATA VARCHAR(255),
+       VALUE VARCHAR(255),
+       PRIMARY KEY(NODE_INSTANCE_ID,INPUT_KEY),
+       FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE NODE_OUTPUT
+(
+       NODE_INSTANCE_ID VARCHAR(255),
+       OUTPUT_KEY VARCHAR(255),
+       OUTPUT_KEY_TYPE VARCHAR(255),
+       METADATA VARCHAR(255),
+       VALUE VARCHAR(255),
+       PRIMARY KEY(NODE_INSTANCE_ID,OUTPUT_KEY),
+       FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE JOB_DETAIL
+(
+        JOB_ID VARCHAR(255),
+        TASK_ID VARCHAR(255),
+        JOB_DESCRIPTION CLOB,
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        COMPUTE_RESOURCE_CONSUMED VARCHAR(255),
+        PRIMARY KEY (TASK_ID, JOB_ID),
+        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE DATA_TRANSFER_DETAIL
+(
+        TRANSFER_ID VARCHAR(255),
+        TASK_ID VARCHAR(255),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        TRANSFER_DESC CLOB,
+        PRIMARY KEY(TRANSFER_ID),
+        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE STATUS
+(
+        STATUS_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+        EXPERIMENT_ID VARCHAR(255),
+        NODE_INSTANCE_ID VARCHAR(255),
+        TRANSFER_ID VARCHAR(255),
+        TASK_ID VARCHAR(255),
+        JOB_ID VARCHAR(255),
+        STATE VARCHAR(255),
+        STATUS_UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+        STATUS_TYPE VARCHAR(255),
+        PRIMARY KEY(STATUS_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE,
+        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE,
+        FOREIGN KEY (TRANSFER_ID) REFERENCES DATA_TRANSFER_DETAIL(TRANSFER_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE CONFIG_DATA
+(
+        EXPERIMENT_ID VARCHAR(255),
+        AIRAVATA_AUTO_SCHEDULE SMALLINT,
+        OVERRIDE_MANUAL_SCHEDULE_PARAMS SMALLINT,
+        SHARE_EXPERIMENT SMALLINT,
+        PRIMARY KEY(EXPERIMENT_ID)
+);
+
+CREATE TABLE COMPUTATIONAL_RESOURCE_SCHEDULING
+(
+        RESOURCE_SCHEDULING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+        EXPERIMENT_ID VARCHAR(255),
+        TASK_ID VARCHAR(255),
+        RESOURCE_HOST_ID VARCHAR(255),
+        CPU_COUNT INTEGER,
+        NODE_COUNT INTEGER,
+        NO_OF_THREADS INTEGER,
+        QUEUE_NAME VARCHAR(255),
+        WALLTIME_LIMIT INTEGER,
+        JOB_START_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+        TOTAL_PHYSICAL_MEMORY INTEGER,
+        COMPUTATIONAL_PROJECT_ACCOUNT VARCHAR(255),
+        PRIMARY KEY(RESOURCE_SCHEDULING_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE ADVANCE_INPUT_DATA_HANDLING
+(
+       INPUT_DATA_HANDLING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+       EXPERIMENT_ID VARCHAR(255),
+       TASK_ID VARCHAR(255),
+       WORKING_DIR_PARENT VARCHAR(255),
+       UNIQUE_WORKING_DIR VARCHAR(255),
+       STAGE_INPUT_FILES_TO_WORKING_DIR SMALLINT,
+       CLEAN_AFTER_JOB SMALLINT,
+       PRIMARY KEY(INPUT_DATA_HANDLING_ID),
+       FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+       FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE ADVANCE_OUTPUT_DATA_HANDLING
+(
+       OUTPUT_DATA_HANDLING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+       EXPERIMENT_ID VARCHAR(255),
+       TASK_ID VARCHAR(255),
+       OUTPUT_DATA_DIR VARCHAR(255),
+       DATA_REG_URL VARCHAR (255),
+       PERSIST_OUTPUT_DATA SMALLINT,
+       PRIMARY KEY(OUTPUT_DATA_HANDLING_ID),
+       FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+       FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE QOS_PARAM
+(
+        QOS_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+        EXPERIMENT_ID VARCHAR(255),
+        TASK_ID VARCHAR(255),
+        START_EXECUTION_AT VARCHAR(255),
+        EXECUTE_BEFORE VARCHAR(255),
+        NO_OF_RETRIES INTEGER,
+        PRIMARY KEY(QOS_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE COMMUNITY_USER
+(
+        GATEWAY_NAME VARCHAR(256) NOT NULL,
+        COMMUNITY_USER_NAME VARCHAR(256) NOT NULL,
+        TOKEN_ID VARCHAR(256) NOT NULL,
+        COMMUNITY_USER_EMAIL VARCHAR(256) NOT NULL,
+        PRIMARY KEY (GATEWAY_NAME, COMMUNITY_USER_NAME, TOKEN_ID)
+);
+
+CREATE TABLE CREDENTIALS
+(
+        GATEWAY_ID VARCHAR(256) NOT NULL,
+        TOKEN_ID VARCHAR(256) NOT NULL,
+        CREDENTIAL BLOB NOT NULL,
+        PORTAL_USER_ID VARCHAR(256) NOT NULL,
+        TIME_PERSISTED TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        PRIMARY KEY (GATEWAY_ID, TOKEN_ID)
+);
+
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/orchestrator/orchestrator-core/pom.xml
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-core/pom.xml b/modules/orchestrator/orchestrator-core/pom.xml
index cbe10b0..060d5a9 100644
--- a/modules/orchestrator/orchestrator-core/pom.xml
+++ b/modules/orchestrator/orchestrator-core/pom.xml
@@ -97,17 +97,6 @@ the License. -->
                     <target>1.6</target>
                 </configuration>
             </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <configuration>
-                    <excludes>
-                        <exclude>**/ssh/**</exclude>
-                    </excludes>
-                    <forkMode>always</forkMode>
-                    <failIfNoTests>false</failIfNoTests>
-                </configuration>
-            </plugin>
         </plugins>
         <testSourceDirectory>${project.basedir}/src/test/java</testSourceDirectory>
         <testOutputDirectory>${project.build.directory}/test-classes</testOutputDirectory>

http://git-wip-us.apache.org/repos/asf/airavata/blob/9c555455/modules/registry/airavata-jpa-registry/pom.xml
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/pom.xml b/modules/registry/airavata-jpa-registry/pom.xml
index 751ba6d..e70db93 100644
--- a/modules/registry/airavata-jpa-registry/pom.xml
+++ b/modules/registry/airavata-jpa-registry/pom.xml
@@ -138,97 +138,23 @@
                     </execution>
                 </executions>
             </plugin>
-            <!--<plugin>-->
-                <!--<groupId>org.apache.maven.plugins</groupId>-->
-                <!--<artifactId>maven-antrun-plugin</artifactId>-->
-                <!--<version>1.7</version>-->
-                <!--<executions>-->
-                    <!--<execution>-->
-                        <!--<phase>compile</phase>-->
-                        <!--<configuration>-->
-                            <!--<tasks>-->
-                                <!--<path id="enhance.path.ref">-->
-                                    <!--<fileset dir="${project.build.outputDirectory}">-->
-                                        <!--<include name="Application_Descriptor.class" />-->
-                                        <!--<include name="Application_Descriptor_PK.class" />-->
-                                        <!--<include name="Configuration.class" />-->
-                                        <!--<include name="Configuration_PK.class" />-->
-                                        <!--<include name="Execution_Error.class" />-->
-                                        <!--<include name="Experiment_Configuration_Data.class" />-->
-                                        <!--<include name="Experiment_Input.class" />-->
-                                        <!--<include name="Experiment_Input_PK.class" />-->
-                                        <!--<include name="Experiment_Metadata.class" />-->
-                                        <!--<include name="Experiment_Output.class" />-->
-                                        <!--<include name="Experiment_Output_PK.class" />-->
-                                        <!--<include name="Experiment_Output.class" />-->
-                                        <!--<include name="Gateway.class" />-->
-                                        <!--<include name="Gateway_Worker.class" />-->
-                                        <!--<include name="Gateway_Worker_PK.class" />-->
-                                        <!--<include name="GFac_Job_Data.class" />-->
-                                        <!--<include name="GFac_Job_Status.class" />-->
-                                        <!--<include name="Gram_Data.class" />-->
-                                        <!--<include name="Gram_Data_PK.class" />-->
-                                        <!--<include name="Host_Descriptor.class" />-->
-                                        <!--<include name="Host_Descriptor_PK.class" />-->
-                                        <!--<include name="Node_Data.class" />-->
-                                        <!--<include name="Node_Data_PK.class" />-->
-                                        <!--<include name="Orchestrator.class" />-->
-                                        <!--<include name="Project.class" />-->
-                                        <!--<include name="Published_Workflow.class" />-->
-                                        <!--<include name="Published_Workflow_PK.class" />-->
-                                        <!--<include name="Service_Descriptor.class" />-->
-                                        <!--<include name="Service_Descriptor_PK.class" />-->
-                                        <!--<include name="User_Workflow.class" />-->
-                                        <!--<include name="User_Workflow_PK.class" />-->
-                                        <!--<include name="Users.class" />-->
-                                        <!--<include name="Workflow_Data.class" />-->
-                                    <!--</fileset>-->
-                                <!--</path>-->
-                                <!--<pathconvert property="enhance.files" refid="enhance.path.ref" pathsep=" " />-->
-                                <!--<java classname="org.apache.openjpa.enhance.PCEnhancer">-->
-                                    <!--<arg line="-p persistence.xml" />-->
-                                    <!--<arg line="${enhance.files}" />-->
-                                    <!--<classpath>-->
-                                        <!--<path refid="maven.dependency.classpath" />-->
-                                        <!--<path refid="maven.compile.classpath" />-->
-                                    <!--</classpath>-->
-                                <!--</java>-->
-                            <!--</tasks>-->
-                        <!--</configuration>-->
-                        <!--<goals>-->
-                            <!--<goal>run</goal>-->
-                        <!--</goals>-->
-                    <!--</execution>-->
-                <!--</executions>-->
-            <!--</plugin>-->
-            <!--<plugin>-->
-                <!--<groupId>org.apache.openjpa</groupId>-->
-                <!--<artifactId>openjpa-maven-plugin</artifactId>-->
-                <!--<version>2.2.0</version>-->
-                <!--<configuration>-->
-                    <!--<includes>**/model/*.class</includes>-->
-                    <!--<excludes>**/model/XML*.class</excludes>-->
-                    <!--<addDefaultConstructor>true</addDefaultConstructor>-->
-                    <!--<enforcePropertyRestrictions>true</enforcePropertyRestrictions>-->
-                <!--</configuration>-->
-                <!--<executions>-->
-                    <!--<execution>-->
-                        <!--<id>enhancer</id>-->
-                        <!--<phase>process-classes</phase>-->
-                        <!--<goals>-->
-                            <!--<goal>enhance</goal>-->
-                        <!--</goals>-->
-                    <!--</execution>-->
-                <!--</executions>-->
-                <!--<dependencies>-->
-                    <!--<dependency>-->
-                        <!--<groupId>org.apache.openjpa</groupId>-->
-                        <!--<artifactId>openjpa</artifactId>-->
-                        <!--&lt;!&ndash; set the version to be the same as the level in your runtime &ndash;&gt;-->
-                        <!--<version>1.2.2</version>-->
-                    <!--</dependency>-->
-                <!--</dependencies>-->
-            <!--</plugin>-->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>${surefire.version}</version>
+                <inherited>true</inherited>
+                <configuration>
+                    <failIfNoTests>false</failIfNoTests>
+                    <skipTests>${skipTests}</skipTests>
+                    <workingDirectory>${project.build.testOutputDirectory}</workingDirectory>
+                    <!-- Make sure the Surefire plugin does not run the integration tests; -->
+                    <!-- they are run by the Failsafe plugin configured in the module pom. -->
+                    <excludes>
+                        <exclude>**/TaskDetailResourceTest.java</exclude>
+                        <exclude>**/WorkflowNodeDetailResourceTest.java</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
         </plugins>
     </build>