Posted to commits@airavata.apache.org by di...@apache.org on 2018/06/18 15:53:18 UTC

[airavata] branch group-based-auth updated: Test framework initial commit

This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch group-based-auth
in repository https://gitbox.apache.org/repos/asf/airavata.git


The following commit(s) were added to refs/heads/group-based-auth by this push:
     new 8863fb9  Test framework initial commit
8863fb9 is described below

commit 8863fb9cf06efb9ec49a2b3fcfc306edf32ec758
Author: dimuthu <di...@gmail.com>
AuthorDate: Mon Jun 18 11:53:07 2018 -0400

    Test framework initial commit
---
 modules/registry-refactoring/pom.xml               |   6 +
 .../appcatalog/ApplicationInputEntity.java         |   5 +-
 .../core/entities/appcatalog/BatchQueueEntity.java |  14 +-
 .../appcatalog/UnicoreDatamovementEntity.java      |  12 +-
 .../ApplicationDeploymentRepository.java           |  82 ++-
 .../appcatalog/ApplicationInterfaceRepository.java |   7 +-
 .../appcatalog/ComputeResourceRepository.java      |  34 +-
 .../airavata/registry/core/utils/DBConstants.java  |   4 +
 .../registry/core/utils/QueryConstants.java        |   3 +
 .../src/main/resources/appcatalog-mysql.sql        |   2 +-
 .../ApplicationDeploymentRepositoryTest.java       | 332 +++++++---
 .../ApplicationInterfaceRepositoryTest.java        | 464 +++++++++++---
 .../appcatalog/ComputeResourceRepositoryTest.java  | 693 ++++++++++++++++-----
 .../appcatalog/GatewayGroupsRepositoryTest.java    |  25 +-
 .../appcatalog/GatewayProfileRepositoryTest.java   |  26 +-
 .../GroupResourceProfileRepositoryTest.java        |  31 +-
 .../appcatalog/StorageResourceRepositoryTest.java  |  26 +-
 .../UserResourceProfileRepositoryTest.java         |  28 +-
 .../repositories/appcatalog/util/Initialize.java   | 318 ----------
 .../core/repositories/common/DerbyDBManager.java   | 239 +++++++
 .../core/repositories/common/JdbcUtil.java         | 357 +++++++++++
 .../core/repositories/common/TestBase.java         |  78 +++
 .../expcatalog/ExperimentRepositoryTest.java       |  29 +-
 .../resources/appcatalog-derby.sql}                | 244 ++++----
 .../src/test/resources/expcatalog-derby.sql        | 398 ++++++++++++
 .../src/test/resources/replicacatalog-derby.sql    |  77 +++
 .../src/test/resources/workflowcatalog-derby.sql   | 128 ++++
 .../catalog/impl/ApplicationDeploymentImpl.java    |   4 +-
 .../airavata/app/catalog/AppDeploymentTest.java    |   6 +-
 .../registry/cpi/ApplicationDeployment.java        |   4 +-
 .../api/service/handler/RegistryServerHandler.java |   8 +-
 31 files changed, 2737 insertions(+), 947 deletions(-)

diff --git a/modules/registry-refactoring/pom.xml b/modules/registry-refactoring/pom.xml
index 509603a..716664b 100644
--- a/modules/registry-refactoring/pom.xml
+++ b/modules/registry-refactoring/pom.xml
@@ -96,6 +96,12 @@
             <artifactId>airavata-registry-cpi</artifactId>
             <version>0.17-SNAPSHOT</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.ibatis</groupId>
+            <artifactId>ibatis-sqlmap</artifactId>
+            <version>2.3.0</version>
+        </dependency>
+
     </dependencies>
 
     <build>
diff --git a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationInputEntity.java b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationInputEntity.java
index e6c7ddb..6a9353c 100644
--- a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationInputEntity.java
+++ b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/ApplicationInputEntity.java
@@ -172,12 +172,13 @@ public class ApplicationInputEntity implements Serializable {
 		this.standardInput = standardInput;
 	}
 
-	public String getserFriendlyDescription() {
+	public String getUserFriendlyDescription() {
 		return userFriendlyDescription;
 	}
 
-	public void setserFriendlyDescription(String userFriendlyDescription) {
+	public ApplicationInputEntity setUserFriendlyDescription(String userFriendlyDescription) {
 		this.userFriendlyDescription = userFriendlyDescription;
+		return this;
 	}
 
 	public boolean getIsReadOnly() {
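
The renamed setter above now returns the entity itself, so callers can chain configuration calls. A minimal usage sketch (the fluent style is the only thing this illustrates; the no-arg constructor is assumed):

    // Hypothetical fluent usage enabled by returning this from the setter.
    ApplicationInputEntity input = new ApplicationInputEntity()
            .setUserFriendlyDescription("Number of input iterations");
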
diff --git a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueEntity.java b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueEntity.java
index 024e983..390647b 100644
--- a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueEntity.java
+++ b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/BatchQueueEntity.java
@@ -41,7 +41,7 @@ public class BatchQueueEntity implements Serializable {
     private String queueName;
 
     @Column(name = "MAX_JOB_IN_QUEUE")
-    private int maxJobInQueue;
+    private int maxJobsInQueue;
 
     @Column(name = "MAX_MEMORY")
     private int maxMemory;
@@ -99,12 +99,12 @@ public class BatchQueueEntity implements Serializable {
         this.queueName = queueName;
     }
 
-    public int getMaxJobInQueue() {
-        return maxJobInQueue;
+    public int getMaxJobsInQueue() {
+        return maxJobsInQueue;
     }
 
-    public void setMaxJobInQueue(int maxJobInQueue) {
-        this.maxJobInQueue = maxJobInQueue;
+    public void setMaxJobsInQueue(int maxJobsInQueue) {
+        this.maxJobsInQueue = maxJobsInQueue;
     }
 
     public int getMaxMemory() {
@@ -195,11 +195,11 @@ public class BatchQueueEntity implements Serializable {
         this.queueSpecificMacros = queueSpecificMacros;
     }
 
-    public boolean isDefaultQueue() {
+    public boolean getIsDefaultQueue() {
         return isDefaultQueue;
     }
 
-    public void setDefaultQueue(boolean defaultQueue) {
+    public void setIsDefaultQueue(boolean defaultQueue) {
         isDefaultQueue = defaultQueue;
     }
 }
\ No newline at end of file
diff --git a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UnicoreDatamovementEntity.java b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UnicoreDatamovementEntity.java
index 9c422fc..473992b 100644
--- a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UnicoreDatamovementEntity.java
+++ b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/entities/appcatalog/UnicoreDatamovementEntity.java
@@ -42,8 +42,8 @@ public class UnicoreDatamovementEntity implements Serializable {
 	@Column(name="DATAMOVEMENT_ID")
 	private String dataMovementInterfaceId;
 
-	@Column(name="SECURITY_PROTOCAL")
-	private SecurityProtocol securityProtocal;
+	@Column(name="SECURITY_PROTOCOL")
+	private SecurityProtocol securityProtocol;
 
 	@Column(name="UNICORE_ENDPOINT_URL")
 	private String unicoreEndpointUrl;
@@ -59,12 +59,12 @@ public class UnicoreDatamovementEntity implements Serializable {
 		this.dataMovementInterfaceId = dataMovementInterfaceId;
 	}
 
-	public SecurityProtocol getSecurityProtocal() {
-		return securityProtocal;
+	public SecurityProtocol getSecurityProtocol() {
+		return securityProtocol;
 	}
 
-	public void setSecurityProtocal(SecurityProtocol securityProtocal) {
-		this.securityProtocal = securityProtocal;
+	public void setSecurityProtocol(SecurityProtocol securityProtocol) {
+		this.securityProtocol = securityProtocol;
 	}
 
 	public String getUnicoreEndpointUrl() {
diff --git a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepository.java b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepository.java
index 2dfa516..a9988cf 100644
--- a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepository.java
+++ b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepository.java
@@ -115,29 +115,65 @@ public class ApplicationDeploymentRepository extends AppCatAbstractRepository<Ap
     }
 
     @Override
-    public List<ApplicationDeploymentDescription> getApplicationDeployements(Map<String, String> filters) throws AppCatalogException {
-        if(filters.containsKey(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID)) {
-            logger.debug("Fetching all Application Deployments for given Application Module ID");
-            Map<String, Object> queryParameters = new HashMap<>();
-            queryParameters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, filters.get(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID));
-            List<ApplicationDeploymentDescription> applicationDeploymentDescriptionList =
-                    select(QueryConstants.FIND_APPLICATION_DEPLOYMENTS_FOR_APPLICATION_MODULE_ID, -1, 0, queryParameters);
-            return applicationDeploymentDescriptionList;
-        }
-
-        else if(filters.containsKey(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID)) {
-            logger.debug("Fetching Application Deployments for given Compute Host ID");
-            Map<String, Object> queryParameters = new HashMap<>();
-            queryParameters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, filters.get(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID));
-            List<ApplicationDeploymentDescription> applicationDeploymentDescriptionList =
-                    select(QueryConstants.FIND_APPLICATION_DEPLOYMENTS_FOR_COMPUTE_HOST_ID, -1, 0, queryParameters);
-            return applicationDeploymentDescriptionList;
-        }
-
-        else {
-            logger.error("Unsupported field name for app deployment in filters: " + filters);
-            throw new IllegalArgumentException("Unsupported field name for app deployment in filters: " + filters);
+    public List<ApplicationDeploymentDescription> getApplicationDeployments(Map<String, String> filters) throws AppCatalogException {
+
+        List<ApplicationDeploymentDescription> deploymentDescriptions = new ArrayList<>();
+        try {
+            boolean firstTry=true;
+            for (String fieldName : filters.keySet() ){
+                List<ApplicationDeploymentDescription> tmpDescriptions;
+
+                switch (fieldName) {
+                    case DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID: {
+                        logger.debug("Fetching all Application Deployments for Application Module ID " +
+                                filters.get(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID));
+
+                        Map<String, Object> queryParameters = new HashMap<>();
+                        queryParameters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, filters.get(fieldName));
+                        tmpDescriptions = select(QueryConstants.FIND_APPLICATION_DEPLOYMENTS_FOR_APPLICATION_MODULE_ID, -1, 0, queryParameters);
+                        break;
+                    }
+
+                    case DBConstants.ApplicationDeployment.COMPUTE_HOST_ID: {
+                        logger.debug("Fetching Application Deployments for Compute Host ID " +
+                                filters.get(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID));
+
+                        Map<String, Object> queryParameters = new HashMap<>();
+                        queryParameters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, filters.get(fieldName));
+                        tmpDescriptions = select(QueryConstants.FIND_APPLICATION_DEPLOYMENTS_FOR_COMPUTE_HOST_ID, -1, 0, queryParameters);
+                        break;
+                    }
+
+                    default:
+                        logger.error("Unsupported field name for app deployment in filters: " + filters);
+                        throw new IllegalArgumentException("Unsupported field name for app deployment in filters: " + filters);
+
+                }
+
+                if (firstTry) {
+                    deploymentDescriptions.addAll(tmpDescriptions);
+                    firstTry=false;
+
+                } else {
+                    List<String> ids = new ArrayList<>();
+                    for (ApplicationDeploymentDescription applicationDeploymentDescription : deploymentDescriptions) {
+                        ids.add(applicationDeploymentDescription.getAppDeploymentId());
+                    }
+                    List<ApplicationDeploymentDescription> tmp2Descriptions = new ArrayList<>();
+                    for (ApplicationDeploymentDescription applicationDeploymentDescription : tmpDescriptions) {
+                        if (ids.contains(applicationDeploymentDescription.getAppDeploymentId())){
+                            tmp2Descriptions.add(applicationDeploymentDescription);
+                        }
+                    }
+                    deploymentDescriptions.clear();
+                    deploymentDescriptions.addAll(tmp2Descriptions);
+                }
+            }
+        } catch (Exception e) {
+            logger.error("Error while retrieving app deployment list...", e);
+            throw new AppCatalogException(e);
         }
+        return deploymentDescriptions;
     }
 
     @Override
@@ -150,7 +186,7 @@ public class ApplicationDeploymentRepository extends AppCatAbstractRepository<Ap
     }
 
     @Override
-    public List<ApplicationDeploymentDescription> getAccessibleApplicationDeployements(String gatewayId, List<String> accessibleAppIds, List<String> accessibleCompHostIds) throws AppCatalogException {
+    public List<ApplicationDeploymentDescription> getAccessibleApplicationDeployments(String gatewayId, List<String> accessibleAppIds, List<String> accessibleCompHostIds) throws AppCatalogException {
         Map<String, Object> queryParameters = new HashMap<>();
         queryParameters.put(DBConstants.ApplicationDeployment.GATEWAY_ID, gatewayId);
         queryParameters.put(DBConstants.ApplicationDeployment.ACCESSIBLE_APPLICATION_DEPLOYMENT_IDS, accessibleAppIds);
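
The rewritten getApplicationDeployments above accepts several filter keys at once and intersects the per-key results by deployment ID, where the old version honored only a single key. A usage sketch, assuming the DBConstants keys introduced in this commit:

    Map<String, String> filters = new HashMap<>();
    filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, "appMod1");
    filters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, "compHost2");
    // Returns only deployments matching BOTH the module and the compute host;
    // an unrecognized key still throws IllegalArgumentException.
    List<ApplicationDeploymentDescription> matches =
            new ApplicationDeploymentRepository().getApplicationDeployments(filters);
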
diff --git a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepository.java b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepository.java
index edb7dac..2bad1bf 100644
--- a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepository.java
+++ b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepository.java
@@ -19,6 +19,7 @@
  */
 package org.apache.airavata.registry.core.repositories.appcatalog;
 
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
 import org.apache.airavata.model.appcatalog.appinterface.application_interface_modelConstants;
@@ -33,10 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 public class ApplicationInterfaceRepository extends AppCatAbstractRepository<ApplicationInterfaceDescription, ApplicationInterfaceEntity, String> implements ApplicationInterface {
     private final static Logger logger = LoggerFactory.getLogger(ApplicationInterfaceRepository.class);
@@ -219,6 +217,7 @@ public class ApplicationInterfaceRepository extends AppCatAbstractRepository<App
     @Override
     public List<ApplicationModule> getAccessibleApplicationModules(String gatewayId, List<String> accessibleAppIds, List<String> accessibleCompHostIds) throws AppCatalogException {
         ApplicationModuleRepository applicationModuleRepository = new ApplicationModuleRepository();
+        ApplicationDeploymentRepository deploymentRepository = new ApplicationDeploymentRepository();
         Map<String, Object> queryParameters = new HashMap<>();
         queryParameters.put(DBConstants.ApplicationModule.GATEWAY_ID, gatewayId);
         queryParameters.put(DBConstants.ApplicationDeployment.ACCESSIBLE_APPLICATION_DEPLOYMENT_IDS, accessibleAppIds);
diff --git a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepository.java b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepository.java
index 3f78700..7c2f44c 100644
--- a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepository.java
+++ b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepository.java
@@ -38,6 +38,7 @@ import javax.persistence.Query;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 public class ComputeResourceRepository extends AppCatAbstractRepository<ComputeResourceDescription, ComputeResourceEntity, String> implements ComputeResource {
 
@@ -125,7 +126,9 @@ public class ComputeResourceRepository extends AppCatAbstractRepository<ComputeR
     @Override
     public ComputeResourceDescription getComputeResource(String resourceId) throws AppCatalogException {
         ComputeResourceDescription computeResourceDescription = get(resourceId);
-        computeResourceDescription.setFileSystems(getFileSystems(resourceId));
+        if (computeResourceDescription != null) {
+            computeResourceDescription.setFileSystems(getFileSystems(resourceId));
+        }
         return computeResourceDescription;
     }
 
@@ -263,8 +266,10 @@ public class ComputeResourceRepository extends AppCatAbstractRepository<ComputeR
     public ResourceJobManager getResourceJobManager(String resourceJobManagerId) throws AppCatalogException {
         ResourceJobManagerRepository resourceJobManagerRepository = new ResourceJobManagerRepository();
         ResourceJobManager resourceJobManager = resourceJobManagerRepository.get(resourceJobManagerId);
-        resourceJobManager.setJobManagerCommands(resourceJobManagerRepository.getJobManagerCommand(resourceJobManagerId));
-        resourceJobManager.setParallelismPrefix(resourceJobManagerRepository.getParallelismPrefix(resourceJobManagerId));
+        if (resourceJobManager != null) {
+            resourceJobManager.setJobManagerCommands(resourceJobManagerRepository.getJobManagerCommand(resourceJobManagerId));
+            resourceJobManager.setParallelismPrefix(resourceJobManagerRepository.getParallelismPrefix(resourceJobManagerId));
+        }
         return resourceJobManager;
     }
 
@@ -424,10 +429,29 @@ public class ComputeResourceRepository extends AppCatAbstractRepository<ComputeR
     public GridFTPDataMovement getGridFTPDataMovement(String dataMoveId) throws AppCatalogException {
         GridftpDataMovementEntity entity = execute(entityManager -> entityManager
                 .find(GridftpDataMovementEntity.class, dataMoveId));
-        if(entity == null)
+        if(entity == null) {
             return null;
+        }
+
+        Map<String, Object> queryParameters = new HashMap<>();
+        queryParameters.put(DBConstants.DataMovement.GRID_FTP_DATA_MOVEMENT_ID, entity.getDataMovementInterfaceId());
+        List resultSet = execute(entityManager -> {
+            Query jpaQuery = entityManager.createQuery(QueryConstants.FIND_ALL_GRID_FTP_ENDPOINTS_BY_DATA_MOVEMENT);
+            for (Map.Entry<String, Object> entry : queryParameters.entrySet()) {
+                jpaQuery.setParameter(entry.getKey(), entry.getValue());
+            }
+            return jpaQuery.setFirstResult(0).getResultList();
+        });
+
+        List<GridftpEndpointEntity> endpointEntities = resultSet;
+
         Mapper mapper = ObjectMapperSingleton.getInstance();
-        return mapper.map(entity, GridFTPDataMovement.class);
+
+        List<String> endpoints = endpointEntities.stream().map(GridftpEndpointEntity::getEndpoint).collect(Collectors.toList());
+        GridFTPDataMovement dataMovement = mapper.map(entity, GridFTPDataMovement.class);
+        dataMovement.setGridFTPEndPoints(endpoints);
+
+        return dataMovement;
     }
 
     @Override
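
getGridFTPDataMovement previously mapped the entity alone; with this change it also runs the new endpoint query and copies the endpoint addresses onto the returned model. A sketch of the resulting behavior (the dataMoveId value and the Thrift-generated getter name are assumptions):

    GridFTPDataMovement dm = computeResourceRepository.getGridFTPDataMovement("gridftp-dm-1");
    if (dm != null) {
        // Populated via the new FIND_ALL_GRID_FTP_ENDPOINTS_BY_DATA_MOVEMENT query.
        List<String> endpoints = dm.getGridFTPEndPoints();
    }
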
diff --git a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/utils/DBConstants.java b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/utils/DBConstants.java
index 68b098f..05cee94 100644
--- a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/utils/DBConstants.java
+++ b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/utils/DBConstants.java
@@ -162,4 +162,8 @@ public class DBConstants {
         public static final String WORKFLOW_NAME = "workflowName";
     }
 
+    public static class DataMovement {
+        public static final String GRID_FTP_DATA_MOVEMENT_ID = "dataMovementId";
+    }
+
 }
diff --git a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/utils/QueryConstants.java b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/utils/QueryConstants.java
index 85aca5d..23cef08 100644
--- a/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/utils/QueryConstants.java
+++ b/modules/registry-refactoring/src/main/java/org/apache/airavata/registry/core/utils/QueryConstants.java
@@ -160,4 +160,7 @@ public interface QueryConstants {
     String FIND_ALL_STORAGE_RESOURCES = "SELECT SR FROM " + StorageResourceEntity.class.getSimpleName() + " SR";
     String FIND_ALL_AVAILABLE_STORAGE_RESOURCES = "SELECT SR FROM " + StorageResourceEntity.class.getSimpleName() + " SR " +
             "WHERE SR.enabled = TRUE";
+
+    String FIND_ALL_GRID_FTP_ENDPOINTS_BY_DATA_MOVEMENT = "SELECT GFE FROM " + GridftpEndpointEntity.class.getSimpleName() +
+            " GFE WHERE GFE.gridftpDataMovement.dataMovementInterfaceId LIKE :" + DBConstants.DataMovement.GRID_FTP_DATA_MOVEMENT_ID;
 }
diff --git a/modules/registry-refactoring/src/main/resources/appcatalog-mysql.sql b/modules/registry-refactoring/src/main/resources/appcatalog-mysql.sql
index 39a0069..22cf506 100644
--- a/modules/registry-refactoring/src/main/resources/appcatalog-mysql.sql
+++ b/modules/registry-refactoring/src/main/resources/appcatalog-mysql.sql
@@ -564,7 +564,7 @@ CREATE TABLE USER_COMPUTE_RESOURCE_PREFERENCE (
   RESERVATION varchar(255) DEFAULT NULL,
   RESERVATION_END_TIME datetime DEFAULT NULL,
   RESERVATION_START_TIME datetime DEFAULT NULL,
-  SCRATCH_LOCATION varchar(255) NOT NULL DEFAULT NULL,
+  SCRATCH_LOCATION varchar(255) DEFAULT NULL,
   VALIDATED TINYINT(1) DEFAULT 0,
   PRIMARY KEY (RESOURCE_ID,USER_ID,GATEWAY_ID)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
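
The one-line schema fix above is needed because MySQL rejects a column declared NOT NULL with DEFAULT NULL; dropping NOT NULL makes SCRATCH_LOCATION genuinely optional.
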
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepositoryTest.java
index 878b7e6..2836394 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepositoryTest.java
@@ -25,11 +25,11 @@ import org.apache.airavata.model.appcatalog.appdeployment.CommandObject;
 import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.parallelism.ApplicationParallelismType;
-import org.apache.airavata.registry.core.repositories.appcatalog.util.Initialize;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.apache.airavata.registry.core.utils.DBConstants;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.junit.After;
-import org.junit.Before;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -40,46 +40,45 @@ import static org.junit.Assert.assertFalse;
 
 import java.util.*;
 
-public class ApplicationDeploymentRepositoryTest {
+public class ApplicationDeploymentRepositoryTest extends TestBase {
+
+    private static final Logger logger = LoggerFactory.getLogger(ApplicationDeploymentRepositoryTest.class);
 
-    private static Initialize initialize;
     private ComputeResourceRepository computeResourceRepository;
     private ApplicationInterfaceRepository applicationInterfaceRepository;
     private ApplicationDeploymentRepository applicationDeploymentRepository;
     private String gatewayId = "testGateway";
-    private static final Logger logger = LoggerFactory.getLogger(ApplicationDeploymentRepositoryTest.class);
 
-    @Before
-    public void setUp() {
-        try {
-            initialize = new Initialize("appcatalog-derby.sql");
-            initialize.initializeDB();
-            computeResourceRepository = new ComputeResourceRepository();
-            applicationInterfaceRepository = new ApplicationInterfaceRepository();
-            applicationDeploymentRepository = new ApplicationDeploymentRepository();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
+    public ApplicationDeploymentRepositoryTest() {
+        super(Database.APP_CATALOG);
+        computeResourceRepository = new ComputeResourceRepository();
+        applicationInterfaceRepository = new ApplicationInterfaceRepository();
+        applicationDeploymentRepository = new ApplicationDeploymentRepository();
     }
 
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        initialize.stopDerbyServer();
+    private String addSampleApplicationModule(String tag) throws AppCatalogException {
+        ApplicationModule applicationModule = new ApplicationModule();
+        applicationModule.setAppModuleId("appMod" + tag);
+        applicationModule.setAppModuleName("appModName" + tag);
+        return applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
     }
 
-    @Test
-    public void ApplicationDeploymentRepositoryTest() throws AppCatalogException {
+    private String addSampleComputeResource(String tag) throws AppCatalogException {
         ComputeResourceDescription computeResourceDescription = new ComputeResourceDescription();
-        computeResourceDescription.setComputeResourceId("compHost1");
-        computeResourceDescription.setHostName("compHost1Name");
-        String computeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
+        computeResourceDescription.setComputeResourceId("compHost" + tag);
+        computeResourceDescription.setHostName("compHostName" + tag);
+        return computeResourceRepository.addComputeResource(computeResourceDescription);
+    }
 
-        ApplicationModule applicationModule = new ApplicationModule();
-        applicationModule.setAppModuleId("appMod1");
-        applicationModule.setAppModuleName("appMod1Name");
-        applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
+    private boolean deepCompareDeployment(ApplicationDeploymentDescription expected, ApplicationDeploymentDescription actual) {
+        boolean equals = true;
+        equals = equals & EqualsBuilder.reflectionEquals(expected, actual,
+                "moduleLoadCmds", "libPrependPaths", "libAppendPaths" ,"setEnvironment" ,"preJobCommands"
+                ,"postJobCommands", "__isset_bitfield");
+        return equals;
+    }
 
+    private ApplicationDeploymentDescription prepareSampleDeployment(String tag, String applicationModule, String computeResource) {
         CommandObject moduleLoadCmd = new CommandObject();
         moduleLoadCmd.setCommand("moduleLoadCmd");
         moduleLoadCmd.setCommandOrder(1);
@@ -105,90 +104,219 @@ public class ApplicationDeploymentRepositoryTest {
         postJobCommand.setCommand("postCommand");
         postJobCommand.setCommandOrder(3);
 
-        ApplicationDeploymentDescription testAppDeploymentDesc1 = new ApplicationDeploymentDescription();
-        testAppDeploymentDesc1.setAppDeploymentId("appDep1");
-        testAppDeploymentDesc1.setAppDeploymentDescription("test application deployment1");
-        testAppDeploymentDesc1.setAppModuleId(applicationModule.getAppModuleId());
-        testAppDeploymentDesc1.setComputeHostId(computeResourceId);
-        testAppDeploymentDesc1.setExecutablePath("executablePath1");
-        testAppDeploymentDesc1.setParallelism(ApplicationParallelismType.SERIAL);
-        testAppDeploymentDesc1.setModuleLoadCmds(Arrays.asList(moduleLoadCmd));
-        testAppDeploymentDesc1.setLibPrependPaths(Arrays.asList(libPrependPath));
-        testAppDeploymentDesc1.setLibAppendPaths(Arrays.asList(libAppendPath));
-        testAppDeploymentDesc1.setPreJobCommands(Arrays.asList(preJobCommand));
-        testAppDeploymentDesc1.setPostJobCommands(Arrays.asList(postJobCommand));
-        testAppDeploymentDesc1.setSetEnvironment(Arrays.asList(setEnvironment));
-        testAppDeploymentDesc1.setDefaultQueueName("queue1");
-        testAppDeploymentDesc1.setDefaultCPUCount(10);
-        testAppDeploymentDesc1.setDefaultNodeCount(5);
-        testAppDeploymentDesc1.setDefaultWalltime(15);
-        testAppDeploymentDesc1.setEditableByUser(true);
-
-        ApplicationDeploymentDescription testAppDeploymentDesc2 = new ApplicationDeploymentDescription();
-        testAppDeploymentDesc2.setAppDeploymentId("appDep2");
-        testAppDeploymentDesc2.setAppDeploymentDescription("test application deployment2");
-        testAppDeploymentDesc2.setAppModuleId(applicationModule.getAppModuleId());
-        testAppDeploymentDesc2.setComputeHostId(computeResourceId);
-        testAppDeploymentDesc2.setExecutablePath("executablePath1");
-        testAppDeploymentDesc2.setParallelism(ApplicationParallelismType.MPI);
-        testAppDeploymentDesc2.setModuleLoadCmds(Arrays.asList(moduleLoadCmd));
-        testAppDeploymentDesc2.setLibPrependPaths(Arrays.asList(libPrependPath));
-        testAppDeploymentDesc2.setLibAppendPaths(Arrays.asList(libAppendPath));
-        testAppDeploymentDesc2.setPreJobCommands(Arrays.asList(preJobCommand));
-        testAppDeploymentDesc2.setPostJobCommands(Arrays.asList(postJobCommand));
-        testAppDeploymentDesc2.setSetEnvironment(Arrays.asList(setEnvironment));
-        testAppDeploymentDesc2.setDefaultQueueName("queue2");
-        testAppDeploymentDesc2.setDefaultCPUCount(15);
-        testAppDeploymentDesc2.setDefaultNodeCount(10);
-        testAppDeploymentDesc2.setDefaultWalltime(5);
-        testAppDeploymentDesc2.setEditableByUser(false);
-
-        String testDeploymentId1 = applicationDeploymentRepository.addApplicationDeployment(testAppDeploymentDesc1, gatewayId);
-        ApplicationDeploymentDescription retrievedApplicationDeployment = null;
-        if(applicationDeploymentRepository.isExists(testDeploymentId1)) {
-            retrievedApplicationDeployment = applicationDeploymentRepository.getApplicationDeployement(testDeploymentId1);
-            assertTrue("Retrieved app deployment id matched", retrievedApplicationDeployment.getAppDeploymentId().equals("appDep1"));
-            assertEquals("test application deployment1", retrievedApplicationDeployment.getAppDeploymentDescription());
-            assertEquals(applicationModule.getAppModuleId(), retrievedApplicationDeployment.getAppModuleId());
-            assertEquals(computeResourceDescription.getComputeResourceId(), retrievedApplicationDeployment.getComputeHostId());
-            assertEquals("executablePath1", retrievedApplicationDeployment.getExecutablePath());
-            assertTrue(retrievedApplicationDeployment.getParallelism().equals(ApplicationParallelismType.SERIAL));
-        }
+        ApplicationDeploymentDescription deployment = new ApplicationDeploymentDescription();
+        deployment.setAppDeploymentId("appDep" + tag);
+        deployment.setAppDeploymentDescription("test application deployment" + tag);
+        deployment.setAppModuleId(applicationModule);
+        deployment.setComputeHostId(computeResource);
+        deployment.setExecutablePath("executablePath" + tag);
+        deployment.setParallelism(ApplicationParallelismType.SERIAL);
+        deployment.setModuleLoadCmds(new ArrayList<>(Arrays.asList(moduleLoadCmd)));
+        deployment.setLibPrependPaths(new ArrayList<>(Arrays.asList(libPrependPath)));
+        deployment.setLibAppendPaths(new ArrayList<>(Arrays.asList(libAppendPath)));
+        deployment.setPreJobCommands(new ArrayList<>(Arrays.asList(preJobCommand)));
+        deployment.setPostJobCommands(new ArrayList<>(Arrays.asList(postJobCommand)));
+        deployment.setSetEnvironment(new ArrayList<>(Arrays.asList(setEnvironment)));
+        deployment.setDefaultQueueName("queue" + tag);
+        deployment.setDefaultCPUCount(10);
+        deployment.setDefaultNodeCount(5);
+        deployment.setDefaultWalltime(15);
+        deployment.setEditableByUser(true);
+
+        return deployment;
+    }
 
-        String appDeploymentId = testAppDeploymentDesc1.getAppDeploymentId();
-        testAppDeploymentDesc1.setDefaultQueueName("queue3");
-        applicationDeploymentRepository.updateApplicationDeployment(appDeploymentId , testAppDeploymentDesc1);
-        assertTrue(applicationDeploymentRepository.getApplicationDeployement(appDeploymentId).getDefaultQueueName().equals("queue3"));
+    @Test
+    public void createAppDeploymentTest() throws AppCatalogException {
+
+        Assert.assertNull(applicationDeploymentRepository.getApplicationDeployement("appDep1"));
+        String applicationModule = addSampleApplicationModule("1");
+        String computeResource =  addSampleComputeResource("1");
+
+        ApplicationDeploymentDescription deployment = prepareSampleDeployment("1", applicationModule, computeResource);
+        String deploymentId = applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
+        ApplicationDeploymentDescription savedDeployment = applicationDeploymentRepository.getApplicationDeployement("appDep1");
+        Assert.assertNotNull(savedDeployment);
+        Assert.assertTrue(deepCompareDeployment(deployment, savedDeployment));
+    }
+
+    @Test
+    public void updateAppDeploymentTest() throws AppCatalogException {
+        String applicationModule = addSampleApplicationModule("1");
+        String computeResource =  addSampleComputeResource("1");
+
+        ApplicationDeploymentDescription deployment = prepareSampleDeployment("1", applicationModule, computeResource);
+
+        String deploymentId = applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
+
+        deployment.setDefaultQueueName("updated");
+        deployment.setAppDeploymentDescription("updated description");
+
+        CommandObject moduleLoadCmd = new CommandObject();
+        moduleLoadCmd.setCommand("moduleLoadCmd2");
+        moduleLoadCmd.setCommandOrder(2);
+
+        deployment.getModuleLoadCmds().add(moduleLoadCmd);
+
+        SetEnvPaths libPrependPath = new SetEnvPaths();
+        libPrependPath.setName("libPrependPath2");
+        libPrependPath.setValue("libPrependPathValue2");
+        libPrependPath.setEnvPathOrder(4);
+
+        deployment.getLibPrependPaths().add(libPrependPath);
+
+        deployment.setExecutablePath("executablePath2");
+        deployment.setParallelism(ApplicationParallelismType.MPI);
+        deployment.setDefaultCPUCount(12);
+        deployment.setDefaultNodeCount(15);
+        deployment.setDefaultWalltime(10);
+        deployment.setEditableByUser(false);
+
+        applicationDeploymentRepository.updateApplicationDeployment(deploymentId, deployment);
+
+        ApplicationDeploymentDescription updatedDeployment = applicationDeploymentRepository.getApplicationDeployement(deploymentId);
+
+        Assert.assertTrue(deepCompareDeployment(deployment, updatedDeployment));
+    }
+
+    @Test
+    public void listAllDeployments() throws AppCatalogException {
+
+        List<ApplicationDeploymentDescription> allDeployments = new ArrayList<>();
+
+        for (int i = 0 ; i < 5; i++) {
+            String applicationModule = addSampleApplicationModule(i + "");
+            String computeResource = addSampleComputeResource(i + "");
+            ApplicationDeploymentDescription deployment = prepareSampleDeployment(i + "", applicationModule, computeResource);
+            allDeployments.add(deployment);
+            String savedDeploymentId = applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
+            Assert.assertEquals(deployment.getAppDeploymentId(), savedDeploymentId);
+        }
 
-        String testDeploymentId2 = applicationDeploymentRepository.addApplicationDeployment(testAppDeploymentDesc2, gatewayId);
         List<ApplicationDeploymentDescription> appDeploymentList = applicationDeploymentRepository.getAllApplicationDeployements(gatewayId);
         List<String> appDeploymentIds = applicationDeploymentRepository.getAllApplicationDeployementIds();
-        assertTrue(appDeploymentList.size() == 2);
-        assertTrue(appDeploymentIds.size() == 2);
+
+        Assert.assertEquals(allDeployments.size(), appDeploymentList.size());
+        Assert.assertEquals(allDeployments.size(), appDeploymentIds.size());
+
+        for (int i = 0; i < allDeployments.size(); i++) {
+            Assert.assertTrue(deepCompareDeployment(allDeployments.get(i), appDeploymentList.get(i)));
+            Assert.assertEquals(allDeployments.get(i).getAppDeploymentId(), appDeploymentIds.get(i));
+        }
+    }
+
+    @Test
+    public void filterApplicationDeploymentsTest() throws AppCatalogException {
+
+        String applicationModule1 = addSampleApplicationModule("1");
+        String computeResource1 =  addSampleComputeResource("1");
+        String applicationModule2 = addSampleApplicationModule("2");
+        String computeResource2 =  addSampleComputeResource("2");
+
+        ApplicationDeploymentDescription deployment1 = prepareSampleDeployment( "1", applicationModule1, computeResource1);
+        ApplicationDeploymentDescription deployment2 = prepareSampleDeployment( "2", applicationModule1, computeResource2);
+        ApplicationDeploymentDescription deployment3 = prepareSampleDeployment( "3", applicationModule2, computeResource2);
+
+        applicationDeploymentRepository.saveApplicationDeployment(deployment1, gatewayId);
+        applicationDeploymentRepository.saveApplicationDeployment(deployment2, gatewayId);
+        applicationDeploymentRepository.saveApplicationDeployment(deployment3, gatewayId);
 
         Map<String, String> filters = new HashMap<>();
-        filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule.getAppModuleId());
-        filters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, computeResourceDescription.getComputeResourceId());
-        appDeploymentList = applicationDeploymentRepository.getApplicationDeployements(filters);
-        assertEquals(computeResourceDescription.getComputeResourceId(), appDeploymentList.get(0).getComputeHostId());
+        filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule1);
+        List<ApplicationDeploymentDescription> filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
+        Assert.assertEquals(2, filteredDeployments.size());
+        Assert.assertTrue(deepCompareDeployment(deployment1, filteredDeployments.get(0)));
+        Assert.assertTrue(deepCompareDeployment(deployment2, filteredDeployments.get(1)));
+
+        filters = new HashMap<>();
+        filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule2);
+        filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
+        Assert.assertEquals(1, filteredDeployments.size());
+        Assert.assertTrue(deepCompareDeployment(deployment3, filteredDeployments.get(0)));
+
+        filters = new HashMap<>();
+        filters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, computeResource1);
+        filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
+        Assert.assertEquals(1, filteredDeployments.size());
+        Assert.assertTrue(deepCompareDeployment(deployment1, filteredDeployments.get(0)));
+
+        filters = new HashMap<>();
+        filters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, computeResource2);
+        filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
+        Assert.assertEquals(2, filteredDeployments.size());
+        Assert.assertTrue(deepCompareDeployment(deployment2, filteredDeployments.get(0)));
+        Assert.assertTrue(deepCompareDeployment(deployment3, filteredDeployments.get(1)));
+
+        filters = new HashMap<>();
+        filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule1);
+        filters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, computeResource2);
+        filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
+        Assert.assertEquals(1, filteredDeployments.size());
+        Assert.assertTrue(deepCompareDeployment(deployment2, filteredDeployments.get(0)));
 
-        assertTrue(applicationDeploymentRepository.getAllApplicationDeployements(gatewayId).size() == 2);
+        filters = new HashMap<>();
+        filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule1);
+        filters.put("INVALID FIELD", computeResource2);
+        try {
+            filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
+            Assert.fail();
+        } catch (Exception e) {
+            // ignore
+        }
+    }
+
+    @Test
+    public void deleteApplicationDeploymentTest() throws AppCatalogException {
+
+        String applicationModule = addSampleApplicationModule("1");
+        String computeResource =  addSampleComputeResource("1");
+        ApplicationDeploymentDescription deployment = prepareSampleDeployment( "1", applicationModule, computeResource);
+
+        Assert.assertNull(applicationDeploymentRepository.getApplicationDeployement(deployment.getAppDeploymentId()));
+
+        applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
+        Assert.assertNotNull(applicationDeploymentRepository.getApplicationDeployement(deployment.getAppDeploymentId()));
+        applicationDeploymentRepository.removeAppDeployment(deployment.getAppDeploymentId());
+        Assert.assertNull(applicationInterfaceRepository.getApplicationInterface(deployment.getAppDeploymentId()));
+    }
+
+    @Test
+    public void accessibleDeploymentTest() throws AppCatalogException {
+        String applicationModule1 = addSampleApplicationModule("1");
+        String computeResource1 =  addSampleComputeResource("1");
+        String applicationModule2 = addSampleApplicationModule("2");
+        String computeResource2 =  addSampleComputeResource("2");
+
+        ApplicationDeploymentDescription deployment1 = prepareSampleDeployment( "1", applicationModule1, computeResource1);
+        ApplicationDeploymentDescription deployment2 = prepareSampleDeployment( "2", applicationModule1, computeResource2);
+        ApplicationDeploymentDescription deployment3 = prepareSampleDeployment( "3", applicationModule2, computeResource2);
+
+        applicationDeploymentRepository.saveApplicationDeployment(deployment1, gatewayId);
+        applicationDeploymentRepository.saveApplicationDeployment(deployment2, gatewayId);
+        applicationDeploymentRepository.saveApplicationDeployment(deployment3, gatewayId);
 
         List<String> accessibleAppIds = new ArrayList<>();
-        accessibleAppIds.add(testDeploymentId1);
-        accessibleAppIds.add(testDeploymentId2);
+        accessibleAppIds.add(deployment1.getAppDeploymentId());
+        accessibleAppIds.add(deployment2.getAppDeploymentId());
+        accessibleAppIds.add(deployment3.getAppDeploymentId());
+
         List<String> accessibleCompHostIds = new ArrayList<>();
-        accessibleCompHostIds.add(computeResourceId);
-        appDeploymentList = applicationDeploymentRepository.getAccessibleApplicationDeployements(gatewayId, accessibleAppIds, accessibleCompHostIds);
-        assertTrue(appDeploymentList.size() == 2);
-        assertEquals(testDeploymentId1, appDeploymentList.get(0).getAppDeploymentId());
+        accessibleCompHostIds.add(computeResource1);
 
-        applicationDeploymentRepository.removeAppDeployment(testAppDeploymentDesc2.getAppDeploymentId());
-        assertFalse(applicationDeploymentRepository.isExists(testAppDeploymentDesc2.getAppDeploymentId()));
+        List<ApplicationDeploymentDescription> accessibleApplicationDeployments = applicationDeploymentRepository
+                .getAccessibleApplicationDeployments(gatewayId, accessibleAppIds, accessibleCompHostIds);
 
-        computeResourceRepository.removeComputeResource(computeResourceDescription.getComputeResourceId());
+        assertTrue(accessibleApplicationDeployments.size() == 1);
+        assertTrue(deepCompareDeployment(deployment1, accessibleApplicationDeployments.get(0)));
 
-    }
+        accessibleCompHostIds = new ArrayList<>();
+        accessibleCompHostIds.add(computeResource2);
 
-}
+        accessibleApplicationDeployments = applicationDeploymentRepository
+                .getAccessibleApplicationDeployments(gatewayId, accessibleAppIds, accessibleCompHostIds);
 
+        assertTrue(accessibleApplicationDeployments.size() == 2);
+        assertTrue(deepCompareDeployment(deployment2, accessibleApplicationDeployments.get(0)));
+        assertTrue(deepCompareDeployment(deployment3, accessibleApplicationDeployments.get(1)));
+    }
+}
\ No newline at end of file
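
Throughout the rewritten tests, field-by-field assertions are replaced with a commons-lang3 reflection comparison that excludes the collection-valued fields and the Thrift-generated __isset_bitfield. The core call, shown standalone:

    // EqualsBuilder.reflectionEquals(lhs, rhs, excludeFields...) compares all
    // remaining fields reflectively; the excluded names match the helper above.
    boolean same = EqualsBuilder.reflectionEquals(expected, actual,
            "moduleLoadCmds", "libPrependPaths", "libAppendPaths",
            "setEnvironment", "preJobCommands", "postJobCommands", "__isset_bitfield");
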
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepositoryTest.java
index 72bef72..77b7e87 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepositoryTest.java
@@ -23,14 +23,15 @@ import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentD
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.application.io.DataType;
 import org.apache.airavata.model.application.io.InputDataObjectType;
 import org.apache.airavata.model.application.io.OutputDataObjectType;
 import org.apache.airavata.model.parallelism.ApplicationParallelismType;
-import org.apache.airavata.registry.core.repositories.appcatalog.util.Initialize;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.apache.airavata.registry.core.utils.DBConstants;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.junit.After;
-import org.junit.Before;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -41,123 +42,436 @@ import static org.junit.Assert.assertFalse;
 
 import java.util.*;
 
-public class ApplicationInterfaceRepositoryTest {
+public class ApplicationInterfaceRepositoryTest extends TestBase {
+
+    private static final Logger logger = LoggerFactory.getLogger(ApplicationInterfaceRepositoryTest.class);
 
-    private static Initialize initialize;
     private ApplicationInterfaceRepository applicationInterfaceRepository;
     private ComputeResourceRepository computeResourceRepository;
     private ApplicationDeploymentRepository applicationDeploymentRepository;
     private String gatewayId = "testGateway";
-    private static final Logger logger = LoggerFactory.getLogger(ApplicationInterfaceRepositoryTest.class);
 
-    @Before
-    public void setUp() {
-        try {
-            initialize = new Initialize("appcatalog-derby.sql");
-            initialize.initializeDB();
-            applicationInterfaceRepository = new ApplicationInterfaceRepository();
-            computeResourceRepository = new ComputeResourceRepository();
-            applicationDeploymentRepository = new ApplicationDeploymentRepository();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
+    public ApplicationInterfaceRepositoryTest() {
+        super(TestBase.Database.APP_CATALOG);
+        computeResourceRepository = new ComputeResourceRepository();
+        applicationInterfaceRepository = new ApplicationInterfaceRepository();
+        applicationDeploymentRepository = new ApplicationDeploymentRepository();
+    }
+
+    @Test
+    public void addApplicationModuleTest() throws AppCatalogException {
+        ApplicationModule applicationModule = new ApplicationModule();
+        applicationModule.setAppModuleId("appMod1");
+        applicationModule.setAppModuleName("appMod1Name");
+        applicationModule.setAppModuleDescription("Description");
+        applicationModule.setAppModuleVersion("Version1");
+        String moduleId = applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
+
+        ApplicationModule savedAppModule = applicationInterfaceRepository.getApplicationModule(moduleId);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(applicationModule, savedAppModule));
     }
 
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        initialize.stopDerbyServer();
+    @Test
+    public void addApplicationModuleWithEmptyIdTest() throws AppCatalogException {
+        ApplicationModule applicationModule = new ApplicationModule();
+        applicationModule.setAppModuleName("appMod1Name");
+        applicationModule.setAppModuleDescription("Description");
+        applicationModule.setAppModuleVersion("Version1");
+        String moduleId = applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
+
+        ApplicationModule savedAppModule = applicationInterfaceRepository.getApplicationModule(moduleId);
+        Assert.assertEquals(applicationModule.getAppModuleName(), savedAppModule.getAppModuleId());
+    }
+
+    @Test
+    public void deleteApplicationModuleTest() throws AppCatalogException {
+
+        Assert.assertNull(applicationInterfaceRepository.getApplicationModule("appMod1"));
+
+        ApplicationModule applicationModule = new ApplicationModule();
+        applicationModule.setAppModuleId("appMod1");
+        applicationModule.setAppModuleName("appMod1Name");
+        String moduleId = applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
+        Assert.assertNotNull(applicationInterfaceRepository.getApplicationModule(moduleId));
+
+        Assert.assertTrue(applicationInterfaceRepository.removeApplicationModule("appMod1"));
+
+        Assert.assertNull(applicationInterfaceRepository.getApplicationModule("appMod1"));
+    }
+
+    @Test
+    public void updateApplicationModuleTest() throws AppCatalogException {
+        ApplicationModule applicationModule = new ApplicationModule();
+        applicationModule.setAppModuleId("appMod1");
+        applicationModule.setAppModuleName("appMod1Name");
+        applicationModule.setAppModuleDescription("Description");
+        applicationModule.setAppModuleVersion("Version1");
+        String moduleId = applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
+
+        ApplicationModule savedAppModule = applicationInterfaceRepository.getApplicationModule(moduleId);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(applicationModule, savedAppModule));
+
+        savedAppModule.setAppModuleName("Updated Name");
+        savedAppModule.setAppModuleDescription("Updated Description");
+        savedAppModule.setAppModuleVersion("new version");
+
+        applicationInterfaceRepository.updateApplicationModule("appMod1", savedAppModule);
+
+        ApplicationModule updatedAppModule = applicationInterfaceRepository.getApplicationModule(moduleId);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(savedAppModule, updatedAppModule));
     }
 
     @Test
-    public void ApplicationInterfaceRepositoryTest() throws AppCatalogException {
+    public void addApplicationInterfaceTest() throws AppCatalogException {
         ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
         applicationInterfaceDescription.setApplicationInterfaceId("interface1");
         applicationInterfaceDescription.setApplicationName("app interface 1");
+        applicationInterfaceDescription.setApplicationModules(new ArrayList<>());
+        applicationInterfaceDescription.setApplicationInputs(new ArrayList<>());
+        applicationInterfaceDescription.setApplicationOutputs(new ArrayList<>());
 
         String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
         assertEquals(applicationInterfaceDescription.getApplicationInterfaceId(), interfaceId);
 
-        ComputeResourceDescription computeResourceDescription = new ComputeResourceDescription();
-        computeResourceDescription.setComputeResourceId("compHost1");
-        computeResourceDescription.setHostName("compHost1Name");
-        String computeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
+        ApplicationInterfaceDescription savedInterface = applicationInterfaceRepository.getApplicationInterface(interfaceId);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(applicationInterfaceDescription, savedInterface, "__isset_bitfield"));
+    }
 
-        ApplicationModule applicationModule = new ApplicationModule();
-        applicationModule.setAppModuleId("appMod1");
-        applicationModule.setAppModuleName("appMod1Name");
-        String moduleId = applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
+    @Test
+    public void deleteApplicationInterfaceTest() throws AppCatalogException {
 
-        ApplicationDeploymentDescription applicationDeploymentDescription = new ApplicationDeploymentDescription();
-        applicationDeploymentDescription.setAppDeploymentId("appDep1");
-        applicationDeploymentDescription.setAppModuleId(moduleId);
-        applicationDeploymentDescription.setComputeHostId(computeResourceId);
-        applicationDeploymentDescription.setExecutablePath("executablePath");
-        applicationDeploymentDescription.setParallelism(ApplicationParallelismType.SERIAL);
-        String deploymentId = applicationDeploymentRepository.addApplicationDeployment(applicationDeploymentDescription, gatewayId);
+        Assert.assertNull(applicationInterfaceRepository.getApplicationModule("interface1"));
 
+        ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
+        applicationInterfaceDescription.setApplicationInterfaceId("interface1");
+        applicationInterfaceDescription.setApplicationName("app interface 1");
+        applicationInterfaceDescription.setApplicationModules(new ArrayList<>());
+        applicationInterfaceDescription.setApplicationInputs(new ArrayList<>());
+        applicationInterfaceDescription.setApplicationOutputs(new ArrayList<>());
+
+        String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
+
+        Assert.assertNotNull(applicationInterfaceRepository.getApplicationInterface(interfaceId));
+        Assert.assertTrue(applicationInterfaceRepository.removeApplicationInterface(interfaceId));
+        Assert.assertNull(applicationInterfaceRepository.getApplicationInterface(interfaceId));
+    }
+
+    @Test
+    public void addModulesToInterfaceTest() throws AppCatalogException {
         ApplicationModule applicationModule1 = new ApplicationModule();
-        applicationModule1.setAppModuleId("appMod2");
-        applicationModule1.setAppModuleName("appMod2Name");
+        applicationModule1.setAppModuleId("appMod1");
+        applicationModule1.setAppModuleName("appMod1Name");
         String moduleId1 = applicationInterfaceRepository.addApplicationModule(applicationModule1, gatewayId);
 
-        applicationInterfaceRepository.addApplicationModuleMapping(moduleId, interfaceId);
+        ApplicationModule applicationModule2 = new ApplicationModule();
+        applicationModule2.setAppModuleId("appMod2");
+        applicationModule2.setAppModuleName("appMod2Name");
+        String moduleId2 = applicationInterfaceRepository.addApplicationModule(applicationModule2, gatewayId);
+
+        ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
+        applicationInterfaceDescription.setApplicationInterfaceId("interface1");
+        applicationInterfaceDescription.setApplicationName("app interface 1");
+
+        String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
+
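+        // Map both modules onto the interface; the assertions below rely on insertion order being preserved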
         applicationInterfaceRepository.addApplicationModuleMapping(moduleId1, interfaceId);
+        applicationInterfaceRepository.addApplicationModuleMapping(moduleId2, interfaceId);
+
+        ApplicationInterfaceDescription savedInterface = applicationInterfaceRepository.getApplicationInterface(interfaceId);
+
+        Assert.assertEquals(applicationModule1.getAppModuleId(), savedInterface.getApplicationModules().get(0));
+        Assert.assertEquals(applicationModule2.getAppModuleId(), savedInterface.getApplicationModules().get(1));
+    }
+
+    @Test
+    public void addInputsOutputsToInterfaceTest() throws AppCatalogException {
+
+        ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
+        applicationInterfaceDescription.setApplicationInterfaceId("interface1");
+        applicationInterfaceDescription.setApplicationName("app interface 1");
+
+        String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
 
         InputDataObjectType input = new InputDataObjectType();
         input.setName("input1");
+        input.setApplicationArgument("Arg");
+        input.setDataStaged(true);
+        input.setInputOrder(0);
+        input.setIsReadOnly(true);
+        input.setIsRequired(true);
+        input.setRequiredToAddedToCommandLine(true);
+        input.setType(DataType.FLOAT);
+        input.setUserFriendlyDescription("User friendly description");
+        input.setValue("113");
+        input.setMetaData("Metadata");
+        input.setStandardInput(true);
+        // TODO missing field
+        //input.setStorageResourceId("Storage resource id");
 
         OutputDataObjectType output = new OutputDataObjectType();
         output.setName("output1");
+        output.setValue("value");
+        output.setType(DataType.FLOAT);
+        output.setApplicationArgument("Argument");
+        output.setDataMovement(true);
+        output.setIsRequired(true);
+        output.setLocation("/home/");
+        output.setSearchQuery("Search query");
+        output.setRequiredToAddedToCommandLine(true);
+        output.setOutputStreaming(true);
+        // TODO missing field
+        //output.setStorageResourceId("Storage resource id");
+
+        applicationInterfaceDescription.setApplicationInputs(Collections.singletonList(input));
+        applicationInterfaceDescription.setApplicationOutputs(Collections.singletonList(output));
 
-        applicationInterfaceDescription.setApplicationInputs(Arrays.asList(input));
-        applicationInterfaceDescription.setApplicationOutputs(Arrays.asList(output));
         applicationInterfaceRepository.updateApplicationInterface(interfaceId, applicationInterfaceDescription);
-        ApplicationInterfaceDescription appDescription = applicationInterfaceRepository.getApplicationInterface(interfaceId);
-        assertTrue(appDescription.getApplicationInputs().size() == 1);
-        assertEquals(output.getName(), appDescription.getApplicationOutputs().get(0).getName());
 
-        applicationModule.setAppModuleVersion("1.0");
-        applicationInterfaceRepository.updateApplicationModule(moduleId, applicationModule);
-        ApplicationModule appModule = applicationInterfaceRepository.getApplicationModule(moduleId);
-        assertFalse(appModule.getAppModuleVersion() == null);
+        ApplicationInterfaceDescription savedInterface = applicationInterfaceRepository.getApplicationInterface(interfaceId);
+        Assert.assertEquals(1, savedInterface.getApplicationInputsSize());
+        Assert.assertEquals(1, savedInterface.getApplicationOutputsSize());
+
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(input, savedInterface.getApplicationInputs().get(0), "__isset_bitfield"));
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(output, savedInterface.getApplicationOutputs().get(0), "__isset_bitfield"));
+
+        List<InputDataObjectType> savedInputs = applicationInterfaceRepository.getApplicationInputs(interfaceId);
+        List<OutputDataObjectType> savedOutputs = applicationInterfaceRepository.getApplicationOutputs(interfaceId);
+
+        Assert.assertEquals(1, savedInputs.size());
+        Assert.assertEquals(1, savedOutputs.size());
+
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(input, savedInputs.get(0), "__isset_bitfield"));
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(output, savedOutputs.get(0), "__isset_bitfield"));
+    }
+
+    @Test
+    public void filterApplicationInterfacesTest() throws AppCatalogException {
+
+        List<ApplicationInterfaceDescription> interfaces = new ArrayList<>();
+        for (int i = 0 ;i < 5 ;i ++) {
+            ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
+            applicationInterfaceDescription.setApplicationInterfaceId("interface" + i);
+            applicationInterfaceDescription.setApplicationName("app interface " + i);
+            interfaces.add(applicationInterfaceDescription);
+            applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
+        }
+
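+        // Each interface should be retrievable by filtering on its unique application name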
+        for (ApplicationInterfaceDescription iface : interfaces) {
+            Map<String, String> filters = new HashMap<>();
+            filters.put(DBConstants.ApplicationInterface.APPLICATION_NAME, iface.getApplicationName());
+            assertEquals(iface.getApplicationName(), applicationInterfaceRepository.getApplicationInterfaces(filters).get(0).getApplicationName());
+        }
+    }
+
+    @Test
+    public void filterApplicationModulesTest() throws AppCatalogException {
+        List<ApplicationModule> modules = new ArrayList<>();
+        for (int i = 0 ;i < 5 ;i ++) {
+            ApplicationModule applicationModule = new ApplicationModule();
+            applicationModule.setAppModuleId("appMod" + i);
+            applicationModule.setAppModuleName("appMod1Name");
+            applicationModule.setAppModuleDescription("Description");
+            applicationModule.setAppModuleVersion("Version1");
+            modules.add(applicationModule);
+            applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
+        }
+
+        for (ApplicationModule module : modules) {
+            Map<String, String> filters = new HashMap<>();
+            filters.put(DBConstants.ApplicationModule.APPLICATION_MODULE_NAME, module.getAppModuleName());
+            assertEquals(module.getAppModuleName(),
+                    applicationInterfaceRepository.getApplicationModules(filters).get(0).getAppModuleName());
+        }
+    }
+
+    @Test
+    public void filterModuleByWrongCategoryTest() throws AppCatalogException {
+
+        ApplicationModule applicationModule = new ApplicationModule();
+        applicationModule.setAppModuleId("appMod1");
+        applicationModule.setAppModuleName("appMod1Name");
+        applicationModule.setAppModuleDescription("Description");
+        applicationModule.setAppModuleVersion("Version1");
+        applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
+
+        Map<String, String> filters = new HashMap<>();
+        filters.put("INVALID KEY", applicationModule.getAppModuleName());
+        try {
+            applicationInterfaceRepository.getApplicationModules(filters).get(0).getAppModuleName();
+            Assert.fail("Expected to throw an exception");
+        } catch (IllegalArgumentException e) {
+            // ignore
+        }
+    }
+
+    @Test
+    public void filterInterfaceByWrongCategoryTest() throws AppCatalogException {
+
+        ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
+        applicationInterfaceDescription.setApplicationInterfaceId("interface1");
+        applicationInterfaceDescription.setApplicationName("app interface");
+        applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
 
         Map<String, String> filters = new HashMap<>();
-        filters.put(DBConstants.ApplicationInterface.APPLICATION_NAME, applicationInterfaceDescription.getApplicationName());
-        assertEquals(applicationInterfaceDescription.getApplicationName(),
-                applicationInterfaceRepository.getApplicationInterfaces(filters).get(0).getApplicationName());
+        filters.put("INVALID KEY", applicationInterfaceDescription.getApplicationName());
+        try {
+            applicationInterfaceRepository.getApplicationInterfaces(filters).get(0).getApplicationName();
+            Assert.fail("Expected to throw an exception");
+        } catch (IllegalArgumentException e) {
+            // ignore
+        }
+    }
+
+    @Test
+    public void getAccessibleApplicationModulesTest() throws AppCatalogException {
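+        // Two hosts and two deployments of appMod1 (one per host); appMod2 is never deployed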
 
-        filters = new HashMap<>();
-        filters.put(DBConstants.ApplicationModule.APPLICATION_MODULE_NAME, applicationModule.getAppModuleName());
-        assertEquals(applicationModule.getAppModuleName(),
-                applicationInterfaceRepository.getApplicationModules(filters).get(0).getAppModuleName());
+        ComputeResourceDescription computeResourceDescription1 = new ComputeResourceDescription();
+        computeResourceDescription1.setComputeResourceId("compHost1");
+        computeResourceDescription1.setHostName("compHost1Name");
+        String computeResourceId1 = computeResourceRepository.addComputeResource(computeResourceDescription1);
 
-        List<String> accessibleAppIds = new ArrayList<>();
-        accessibleAppIds.add(deploymentId);
-        List<String> accessibleCompHostIds = new ArrayList<>();
-        accessibleCompHostIds.add(computeResourceId);
-        List<ApplicationModule> appModuleList = applicationInterfaceRepository.getAccessibleApplicationModules(gatewayId, accessibleAppIds, accessibleCompHostIds);
-        assertTrue(appModuleList.size() == 1);
-        assertEquals(moduleId, appModuleList.get(0).getAppModuleId());
+        ComputeResourceDescription computeResourceDescription2 = new ComputeResourceDescription();
+        computeResourceDescription2.setComputeResourceId("compHost2");
+        computeResourceDescription2.setHostName("compHost2Name");
+        String computeResourceId2 = computeResourceRepository.addComputeResource(computeResourceDescription2);
 
-        assertTrue(applicationInterfaceRepository.getAllApplicationInterfaces(gatewayId).size() == 1);
-        assertTrue(applicationInterfaceRepository.getAllApplicationModules(gatewayId).size() == 2);
-        assertEquals(interfaceId, applicationInterfaceRepository.getAllApplicationInterfaceIds().get(0));
+        ApplicationModule applicationModule1 = new ApplicationModule();
+        applicationModule1.setAppModuleId("appMod1");
+        applicationModule1.setAppModuleName("appMod1Name");
+        String moduleId1 = applicationInterfaceRepository.addApplicationModule(applicationModule1, gatewayId);
+
+        ApplicationModule applicationModule2 = new ApplicationModule();
+        applicationModule2.setAppModuleId("appMod2");
+        applicationModule2.setAppModuleName("appMod2Name");
+        String moduleId2 = applicationInterfaceRepository.addApplicationModule(applicationModule2, gatewayId);
 
-        assertEquals(input.getName(), applicationInterfaceRepository.getApplicationInputs(interfaceId).get(0).getName());
-        assertEquals(output.getName(), applicationInterfaceRepository.getApplicationOutputs(interfaceId).get(0).getName());
+        ApplicationDeploymentDescription applicationDeploymentDescription1 = new ApplicationDeploymentDescription();
+        applicationDeploymentDescription1.setAppDeploymentId("appDep1");
+        applicationDeploymentDescription1.setAppModuleId(moduleId1);
+        applicationDeploymentDescription1.setComputeHostId(computeResourceId1);
+        applicationDeploymentDescription1.setExecutablePath("executablePath");
+        applicationDeploymentDescription1.setParallelism(ApplicationParallelismType.SERIAL);
+        String deploymentId1 = applicationDeploymentRepository.addApplicationDeployment(applicationDeploymentDescription1, gatewayId);
 
-        applicationInterfaceRepository.removeApplicationInterface(interfaceId);
-        assertFalse(applicationInterfaceRepository.isApplicationInterfaceExists(interfaceId));
+        Assert.assertNotNull(applicationDeploymentRepository.getApplicationDeployement(deploymentId1));
 
-        applicationInterfaceRepository.removeApplicationModule(moduleId);
-        assertFalse(applicationInterfaceRepository.isApplicationModuleExists(moduleId));
+        ApplicationDeploymentDescription applicationDeploymentDescription2 = new ApplicationDeploymentDescription();
+        applicationDeploymentDescription2.setAppDeploymentId("appDep2");
+        applicationDeploymentDescription2.setAppModuleId(moduleId1);
+        applicationDeploymentDescription2.setComputeHostId(computeResourceId2);
+        applicationDeploymentDescription2.setExecutablePath("executablePath");
+        applicationDeploymentDescription2.setParallelism(ApplicationParallelismType.SERIAL);
+        String deploymentId2 = applicationDeploymentRepository.addApplicationDeployment(applicationDeploymentDescription2, gatewayId);
 
-        applicationInterfaceRepository.removeApplicationModule(moduleId1);
-        assertFalse(applicationInterfaceRepository.isApplicationModuleExists(moduleId1));
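+        // deployment1 on host1 should expose appMod1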
+        List<String> deploymentIds = new ArrayList<>();
+        deploymentIds.add(deploymentId1);
+        List<String> compHostIds = new ArrayList<>();
+        compHostIds.add(computeResourceId1);
+        List<ApplicationModule> appModuleList = applicationInterfaceRepository.getAccessibleApplicationModules(gatewayId, deploymentIds, compHostIds);
 
-        computeResourceRepository.removeComputeResource(computeResourceId);
+        assertEquals(1, appModuleList.size());
+        assertEquals(moduleId1, appModuleList.get(0).getAppModuleId());
 
+        deploymentIds = new ArrayList<>();
+        deploymentIds.add(deploymentId1);
+        compHostIds = new ArrayList<>();
+        compHostIds.add(computeResourceId2);
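+        // deployment1 runs on host1, so pairing it with host2 should yield nothing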
+        appModuleList = applicationInterfaceRepository.getAccessibleApplicationModules(gatewayId, deploymentIds, compHostIds);
+        assertEquals(0, appModuleList.size());
+
+        deploymentIds = new ArrayList<>();
+        deploymentIds.add(deploymentId2);
+        compHostIds = new ArrayList<>();
+        compHostIds.add(computeResourceId2);
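+        // deployment2 exposes the same module, this time through host2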
+        appModuleList = applicationInterfaceRepository.getAccessibleApplicationModules(gatewayId, deploymentIds, compHostIds);
+        assertEquals(1, appModuleList.size());
+        assertEquals(moduleId1, appModuleList.get(0).getAppModuleId());
     }
 
+    @Test
+    public void getAllApplicationModulesByGatewayTest() throws AppCatalogException {
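+        // Create five modules under each of five gateways, then verify per-gateway retrieval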
+        Map<String, List<ApplicationModule>> moduleStore = new HashMap<>();
+
+        for (int j = 0; j < 5; j++) {
+            List<ApplicationModule> modules = new ArrayList<>();
+            String gateway  = "gateway" + j;
+            for (int i = 0; i < 5; i++) {
+                ApplicationModule applicationModule = new ApplicationModule();
+                applicationModule.setAppModuleId(gateway + "appMod" + i);
+                applicationModule.setAppModuleName(gateway + "appMod1Name");
+                applicationModule.setAppModuleDescription(gateway + "Description");
+                applicationModule.setAppModuleVersion(gateway + "Version1");
+                modules.add(applicationModule);
+                applicationInterfaceRepository.addApplicationModule(applicationModule, gateway);
+            }
+            moduleStore.put(gateway, modules);
+        }
+
+        for (int j = 0; j < 5; j++) {
+            String gateway  = "gateway" + j;
+            List<ApplicationModule> allApplicationModules = applicationInterfaceRepository.getAllApplicationModules(gateway);
+
+            Assert.assertEquals(moduleStore.get(gateway).size(), allApplicationModules.size());
+            for (int i = 0; i < allApplicationModules.size(); i++) {
+                Assert.assertTrue(EqualsBuilder.reflectionEquals(moduleStore.get(gateway).get(i), allApplicationModules.get(i), "__isset_bitfield"));
+            }
+        }
+    }
+
+    @Test
+    public void getAllApplicationInterfacesByGatewayTest() throws AppCatalogException {
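+        // Same layout as the module test: five interfaces under each of five gateways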
+        Map<String, List<ApplicationInterfaceDescription>> interfaceStore = new HashMap<>();
+
+        for (int j = 0; j < 5; j++) {
+            List<ApplicationInterfaceDescription> interfaces = new ArrayList<>();
+            String gateway  = "gateway" + j;
+            for (int i = 0; i < 5; i++) {
+                ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
+                applicationInterfaceDescription.setApplicationInterfaceId(gateway + "interface" + i);
+                applicationInterfaceDescription.setApplicationName(gateway + "app interface " + i);
+                applicationInterfaceDescription.setApplicationModules(new ArrayList<>());
+                applicationInterfaceDescription.setApplicationInputs(new ArrayList<>());
+                applicationInterfaceDescription.setApplicationOutputs(new ArrayList<>());
+                interfaces.add(applicationInterfaceDescription);
+                applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gateway);
+            }
+            interfaceStore.put(gateway, interfaces);
+        }
+
+        for (int j = 0; j < 5; j++) {
+            String gateway  = "gateway" + j;
+            List<ApplicationInterfaceDescription> allApplicationInterfaces = applicationInterfaceRepository.getAllApplicationInterfaces(gateway);
+
+            Assert.assertEquals(interfaceStore.get(gateway).size(), allApplicationInterfaces.size());
+            for (int i = 0; i < allApplicationInterfaces.size(); i++) {
+                Assert.assertTrue(EqualsBuilder.reflectionEquals(interfaceStore.get(gateway).get(i), allApplicationInterfaces.get(i), "__isset_bitfield"));
+            }
+        }
+    }
+
+    @Test
+    public void getAllApplicationInterfacesWithoutGatewayTest() throws AppCatalogException {
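+        // With no gateway filter, interface ids across all gateways should be returned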
+
+        List<ApplicationInterfaceDescription> interfaces = new ArrayList<>();
+        for (int j = 0; j < 5; j++) {
+            String gateway  = "gateway" + j;
+            for (int i = 0; i < 5; i++) {
+                ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
+                applicationInterfaceDescription.setApplicationInterfaceId(gateway + "interface" + i);
+                applicationInterfaceDescription.setApplicationName(gateway + "app interface " + i);
+                applicationInterfaceDescription.setApplicationModules(new ArrayList<>());
+                applicationInterfaceDescription.setApplicationInputs(new ArrayList<>());
+                applicationInterfaceDescription.setApplicationOutputs(new ArrayList<>());
+                interfaces.add(applicationInterfaceDescription);
+                applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gateway);
+            }
+        }
+
+        List<String> allApplicationInterfaceIds = applicationInterfaceRepository.getAllApplicationInterfaceIds();
+        Assert.assertEquals(interfaces.size(), allApplicationInterfaceIds.size());
+        for (int i = 0; i < interfaces.size(); i++) {
+            Assert.assertEquals(interfaces.get(i).getApplicationInterfaceId(), allApplicationInterfaceIds.get(i));
+        }
+    }
 }
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepositoryTest.java
index 684e80f..da75de9 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepositoryTest.java
@@ -19,62 +19,425 @@
  */
 package org.apache.airavata.registry.core.repositories.appcatalog;
 
-import org.apache.airavata.model.appcatalog.computeresource.BatchQueue;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.airavata.model.appcatalog.computeresource.FileSystems;
-import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-import org.apache.airavata.model.data.movement.DataMovementInterface;
-import org.apache.airavata.model.data.movement.DataMovementProtocol;
-import org.apache.airavata.model.data.movement.GridFTPDataMovement;
-import org.apache.airavata.model.data.movement.SCPDataMovement;
-import org.apache.airavata.model.data.movement.SecurityProtocol;
-import org.apache.airavata.registry.core.repositories.appcatalog.util.Initialize;
+import org.apache.airavata.model.appcatalog.computeresource.*;
+import org.apache.airavata.model.data.movement.*;
+import org.apache.airavata.model.parallelism.ApplicationParallelismType;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.apache.airavata.registry.core.utils.DBConstants;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.junit.After;
-import org.junit.Before;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
-import static org.junit.Assert.assertTrue;
+public class ComputeResourceRepositoryTest extends TestBase {
 
-public class ComputeResourceRepositoryTest {
+    private static final Logger logger = LoggerFactory.getLogger(ComputeResourceRepositoryTest.class);
 
-    private static Initialize initialize;
     private ComputeResourceRepository computeResourceRepository;
-    private static final Logger logger = LoggerFactory.getLogger(ComputeResourceRepositoryTest.class);
 
-    @Before
-    public void setUp() {
+    public ComputeResourceRepositoryTest() {
+        super(Database.APP_CATALOG);
+        computeResourceRepository = new ComputeResourceRepository();
+    }
+
+    @Test
+    public void removeBatchQueueTest() throws AppCatalogException {
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(resourceJobManager);
+        SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
+        String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
+
+        SCPDataMovement scpDataMovement = prepareScpDataMovement();
+        String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
+
+        GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
+        String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
+
+        ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
+
+        String savedComputeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
+
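+        // The prepared resource has four batch queues; removing the first should leave three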
+        List<BatchQueue> batchQueues = computeResourceDescription.getBatchQueues();
+        Assert.assertTrue(batchQueues.size() > 0);
+
+        computeResourceRepository.removeBatchQueue(savedComputeResourceId, batchQueues.get(0).getQueueName());
+
+        ComputeResourceDescription updatedComputeResource = computeResourceRepository.getComputeResource(savedComputeResourceId);
+
+        List<BatchQueue> updatedBatchQueues = updatedComputeResource.getBatchQueues();
+
+        Assert.assertEquals(batchQueues.size(), updatedBatchQueues.size() + 1);
+        Optional<BatchQueue> searchedInterfaceResult = updatedBatchQueues.stream()
+                .filter(queue -> queue.getQueueName().equals(batchQueues.get(0).getQueueName())).findFirst();
+
+        Assert.assertFalse(searchedInterfaceResult.isPresent());
+    }
+
+    @Test
+    public void removeDataMovementInterfaceTest() throws AppCatalogException {
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(resourceJobManager);
+        SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
+        String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
+
+        SCPDataMovement scpDataMovement = prepareScpDataMovement();
+        String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
+
+        GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
+        String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
+
+        ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
+
+        String savedComputeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
+
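+        // Removing one of the data movement interfaces should leave the rest intact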
+        List<DataMovementInterface> dataMovementInterfaces = computeResourceDescription.getDataMovementInterfaces();
+        Assert.assertTrue(dataMovementInterfaces.size() > 0);
+
+        computeResourceRepository.removeDataMovementInterface(savedComputeResourceId, dataMovementInterfaces.get(0).getDataMovementInterfaceId());
+
+        ComputeResourceDescription updatedComputeResource = computeResourceRepository.getComputeResource(savedComputeResourceId);
+
+        List<DataMovementInterface> updatedDataMovementInterfaces = updatedComputeResource.getDataMovementInterfaces();
+
+        Assert.assertEquals(dataMovementInterfaces.size(), updatedDataMovementInterfaces.size() + 1);
+        Optional<DataMovementInterface> searchedInterfaceResult = updatedDataMovementInterfaces.stream()
+                .filter(iface -> iface.getDataMovementInterfaceId().equals(dataMovementInterfaces.get(0).getDataMovementInterfaceId())).findFirst();
+
+        Assert.assertFalse(searchedInterfaceResult.isPresent());
+    }
+
+    @Test
+    public void removeJobSubmissionInterfaceTest() throws AppCatalogException {
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(resourceJobManager);
+        SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
+        String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
+
+        SCPDataMovement scpDataMovement = prepareScpDataMovement();
+        String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
+
+        GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
+        String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
+
+        ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
+
+        String savedComputeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
+
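+        // Removing a job submission interface should make it disappear from the resource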
+        List<JobSubmissionInterface> jobSubmissionInterfaces = computeResourceDescription.getJobSubmissionInterfaces();
+        Assert.assertTrue(jobSubmissionInterfaces.size() > 0);
+
+        computeResourceRepository.removeJobSubmissionInterface(savedComputeResourceId, jobSubmissionInterfaces.get(0).getJobSubmissionInterfaceId());
+
+        ComputeResourceDescription updatedComputeResource = computeResourceRepository.getComputeResource(savedComputeResourceId);
+
+        List<JobSubmissionInterface> updatedJobSubmissionInterfaces = updatedComputeResource.getJobSubmissionInterfaces();
+
+        Assert.assertEquals(jobSubmissionInterfaces.size(), updatedJobSubmissionInterfaces.size() + 1);
+        Optional<JobSubmissionInterface> searchedInterfaceResult = updatedJobSubmissionInterfaces.stream()
+                .filter(iface -> iface.getJobSubmissionInterfaceId().equals(jobSubmissionInterfaces.get(0).getJobSubmissionInterfaceId())).findFirst();
+
+        Assert.assertFalse(searchedInterfaceResult.isPresent());
+    }
+
+    @Test
+    public void listComputeResourcesTest() throws AppCatalogException {
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(resourceJobManager);
+        SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
+        String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
+
+        SCPDataMovement scpDataMovement = prepareScpDataMovement();
+        String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
+
+        GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
+        String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
+
+        List<String> allIds = new ArrayList<>();
+        List<ComputeResourceDescription> allComputeResources = new ArrayList<>();
+        Map<String, String> allComputeResourceMap = new HashMap<>();
+        for (int i = 0; i < 5; i++) {
+            ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
+            computeResourceDescription.setHostName("Host" + i);
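+            // Enable only even-indexed hosts so the available-resource query below has a known answer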
+            computeResourceDescription.setEnabled(i % 2 == 0);
+            String savedId = computeResourceRepository.addComputeResource(computeResourceDescription);
+            allIds.add(savedId);
+            allComputeResources.add(computeResourceDescription);
+            allComputeResourceMap.put(savedId, computeResourceDescription.getHostName());
+        }
+
+        List<ComputeResourceDescription> allSavedComputeResources = computeResourceRepository.getAllComputeResourceList();
+
+        Assert.assertEquals(5, allSavedComputeResources.size());
+        for (int i = 0; i < 5; i++) {
+            Assert.assertTrue(deepCompareComputeResourceDescription(allComputeResources.get(i), allSavedComputeResources.get(i)));
+        }
+
+        Map<String, String> allSavedComputeResourceIds = computeResourceRepository.getAllComputeResourceIdList();
+
+        Assert.assertEquals(5, allSavedComputeResourceIds.size());
+
+        for (String id : allIds) {
+            String host = allSavedComputeResourceIds.get(id);
+            Assert.assertNotNull(host);
+            Assert.assertEquals(allComputeResourceMap.get(id), host);
+        }
+
+        Map<String, String> allAvailableIds = computeResourceRepository.getAvailableComputeResourceIdList();
+
+        Assert.assertEquals(3, allAvailableIds.size());
+        Assert.assertNotNull(allAvailableIds.get(allIds.get(0)));
+        Assert.assertNotNull(allAvailableIds.get(allIds.get(2)));
+        Assert.assertNotNull(allAvailableIds.get(allIds.get(4)));
+    }
+
+    @Test
+    public void filterComputeResourcesTest() throws AppCatalogException {
+
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(resourceJobManager);
+        SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
+        String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
+
+        SCPDataMovement scpDataMovement = prepareScpDataMovement();
+        String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
+
+        GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
+        String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
+        ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
+
+        Map<String, String> cfilters = new HashMap<String, String>();
+        cfilters.put(DBConstants.ComputeResource.HOST_NAME, "localhost");
+        List<ComputeResourceDescription> computeResourceList = computeResourceRepository.getComputeResourceList(cfilters);
+
+        Assert.assertEquals(0, computeResourceList.size());
+
+        String computeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
+        computeResourceList = computeResourceRepository.getComputeResourceList(cfilters);
+
+        Assert.assertEquals(1, computeResourceList.size());
+
+        Assert.assertEquals(computeResourceId, computeResourceList.get(0).getComputeResourceId());
+
         try {
-            initialize = new Initialize("appcatalog-derby.sql");
-            initialize.initializeDB();
-            computeResourceRepository = new ComputeResourceRepository();
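+            // An unknown filter key should be rejected with an exception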
+            cfilters = new HashMap<String, String>();
+            cfilters.put("Invalid_filter", "localhost");
+            computeResourceRepository.getComputeResourceList(cfilters);
+            Assert.fail("Expected to throw an exception");
         } catch (Exception e) {
-            logger.error(e.getMessage(), e);
+            // ignore
         }
     }
 
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        initialize.stopDerbyServer();
+    @Test
+    public void updateComputeResourceTest() throws AppCatalogException {
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(resourceJobManager);
+        SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
+        String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
+
+        SCPDataMovement scpDataMovement = prepareScpDataMovement();
+        String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
+
+        GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
+        String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
+        ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
+
+        String computeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
+
+        ComputeResourceDescription savedComputeResource = computeResourceRepository.getComputeResource(computeResourceId);
+        savedComputeResource.getHostAliases().add("New Alias");
+
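+        // Add a brand-new queue and tweak scalar fields, then verify the full round trip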
+        BatchQueue batchQueue = new BatchQueue();
+        batchQueue.setQueueName("queue new ");
+        batchQueue.setQueueDescription("que1Desc new");
+        batchQueue.setMaxRunTime(16);
+        batchQueue.setMaxNodes(10);
+        batchQueue.setMaxProcessors(11);
+        batchQueue.setMaxJobsInQueue(5);
+        batchQueue.setMaxMemory(2005);
+        batchQueue.setCpuPerNode(7);
+        batchQueue.setDefaultNodeCount(11);
+        batchQueue.setDefaultCPUCount(3);
+        batchQueue.setDefaultWalltime(34);
+        batchQueue.setQueueSpecificMacros("Macros new");
+        batchQueue.setIsDefaultQueue(true);
+
+        savedComputeResource.getBatchQueues().add(batchQueue);
+        savedComputeResource.setCpusPerNode(43);
+        savedComputeResource.setDefaultWalltime(4343);
+
+        computeResourceRepository.updateComputeResource(computeResourceId, savedComputeResource);
+
+        ComputeResourceDescription updatedComputeResource = computeResourceRepository.getComputeResource(computeResourceId);
+        Assert.assertTrue(deepCompareComputeResourceDescription(savedComputeResource, updatedComputeResource));
     }
 
     @Test
-    public void ComputeResourceRepositoryTest() throws AppCatalogException {
+    public void addComputeResourceTest() throws AppCatalogException {
+
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(resourceJobManager);
+        SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
+        String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
+
+        SCPDataMovement scpDataMovement = prepareScpDataMovement();
+        String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
+
+        GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
+        String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
+        ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
+
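+        // A caller-supplied id should be honored rather than replaced by a generated one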
+        computeResourceDescription.setComputeResourceId("manually-entered-id");
+
+        Assert.assertNull(computeResourceRepository.getComputeResource("manually-entered-id"));
+        String computeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
+        Assert.assertEquals("manually-entered-id", computeResourceId);
+        Assert.assertTrue(computeResourceRepository.isComputeResourceExists(computeResourceId));
+        ComputeResourceDescription savedComputeResource = computeResourceRepository.getComputeResource("manually-entered-id");
+        Assert.assertNotNull(savedComputeResource);
+
+        Assert.assertTrue(deepCompareComputeResourceDescription(computeResourceDescription, savedComputeResource));
+    }
+
+
+    @Test
+    public void addResourceJobManagerTest() throws AppCatalogException {
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        String jobManagerId = computeResourceRepository.addResourceJobManager(resourceJobManager);
+        ResourceJobManager savedJobManager = computeResourceRepository.getResourceJobManager(jobManagerId);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(resourceJobManager, savedJobManager, "__isset_bitfield"));
+    }
+
+    @Test
+    public void deleteResourceJobManagerTest() throws AppCatalogException {
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        String jobManagerId = computeResourceRepository.addResourceJobManager(resourceJobManager);
+
+        Assert.assertNotNull(computeResourceRepository.getResourceJobManager(jobManagerId));
+        computeResourceRepository.deleteResourceJobManager(jobManagerId);
+        Assert.assertNull(computeResourceRepository.getResourceJobManager(jobManagerId));
+    }
+
+    @Test
+    public void updateResourceJobManagerTest() throws AppCatalogException {
+        ResourceJobManager resourceJobManager = prepareResourceJobManager();
+        String jobManagerId = computeResourceRepository.addResourceJobManager(resourceJobManager);
+        ResourceJobManager savedJobManager = computeResourceRepository.getResourceJobManager(jobManagerId);
+
+        savedJobManager.setJobManagerBinPath("/new bin");
+        savedJobManager.getJobManagerCommands().put(JobManagerCommand.SHOW_START, "New Command Value");
+        savedJobManager.getParallelismPrefix().put(ApplicationParallelismType.MPI, "MPI Type");
+
+        computeResourceRepository.updateResourceJobManager(jobManagerId, savedJobManager);
+
+        ResourceJobManager updatedJobManager = computeResourceRepository.getResourceJobManager(jobManagerId);
+
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(savedJobManager, updatedJobManager, "__isset_bitfield"));
+    }
+
+    @Test
+    public void addUnicoreJobSubmissionTest() throws AppCatalogException {
+        UnicoreJobSubmission unicoreJobSubmission = prepareUnicoreJobSubmission();
+        String savedSubmissionId = computeResourceRepository.addUNICOREJobSubmission(unicoreJobSubmission);
+        UnicoreJobSubmission savedSubmission = computeResourceRepository.getUNICOREJobSubmission(savedSubmissionId);
+
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(unicoreJobSubmission, savedSubmission, "__isset_bitfield"));
+    }
+
+    @Test
+    public void addCloudJobSubmissionTest() throws AppCatalogException {
+        CloudJobSubmission cloudJobSubmission = prepareCloudJobSubmission();
+        String savedSubmissionId = computeResourceRepository.addCloudJobSubmission(cloudJobSubmission);
+        CloudJobSubmission savedSubmission = computeResourceRepository.getCloudJobSubmission(savedSubmissionId);
+
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(cloudJobSubmission, savedSubmission, "__isset_bitfield"));
+    }
+
+    @Test
+    public void addLocalJobSubmissionTest() throws AppCatalogException {
+        ResourceJobManager jobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(jobManager);
+
+        LOCALSubmission localSubmission = prepareLocalJobSubmission(jobManager);
+        String savedSubmissionId = computeResourceRepository.addLocalJobSubmission(localSubmission);
+        LOCALSubmission savedSubmission = computeResourceRepository.getLocalJobSubmission(savedSubmissionId);
+
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(localSubmission, savedSubmission, "__isset_bitfield"));
+    }
+
+    @Test
+    public void addSSHJobSubmissionTest() throws AppCatalogException {
+        ResourceJobManager jobManager = prepareResourceJobManager();
+        computeResourceRepository.addResourceJobManager(jobManager);
+
+        SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(jobManager);
+        String jobSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
+        SSHJobSubmission savedJobSubmission = computeResourceRepository.getSSHJobSubmission(jobSubmissionId);
+
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(sshJobSubmission, savedJobSubmission, "__isset_bitfield"));
+    }
+
+    @Test
+    public void addSCPDataMovementTest() throws AppCatalogException {
+        SCPDataMovement scpDataMovement = prepareScpDataMovement();
+        String dataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
+
+        SCPDataMovement savedDataMovement = computeResourceRepository.getSCPDataMovement(dataMovementId);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(scpDataMovement, savedDataMovement, "__isset_bitfield"));
+    }
+
+    @Test
+    public void addLocalDataMovementTest() throws AppCatalogException {
+        LOCALDataMovement localDataMovement = prepareLocalDataMovement();
+        String dataMovementId = computeResourceRepository.addLocalDataMovement(localDataMovement);
+
+        LOCALDataMovement savedDataMovement = computeResourceRepository.getLocalDataMovement(dataMovementId);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(localDataMovement, savedDataMovement, "__isset_bitfield"));
+    }
+
+    @Test
+    public void addUnicoreDataMovementTest() throws AppCatalogException {
+        UnicoreDataMovement unicoreDataMovement = prepareUnicoreDataMovement();
+        String dataMovementId = computeResourceRepository.addUnicoreDataMovement(unicoreDataMovement);
+
+        UnicoreDataMovement savedDataMovement = computeResourceRepository.getUNICOREDataMovement(dataMovementId);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(unicoreDataMovement, savedDataMovement, "__isset_bitfield"));
+    }
+
+    @Test
+    public void addGridFTPDataMovementTest() throws AppCatalogException {
+        GridFTPDataMovement gridFTPDataMovement1 = prepareGridFTPDataMovement("222.33.43.444", "23.344.44.454");
+        String dataMovementId1 = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement1);
+        GridFTPDataMovement savedDataMovement1 = computeResourceRepository.getGridFTPDataMovement(dataMovementId1);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(gridFTPDataMovement1, savedDataMovement1, "__isset_bitfield"));
+
+        GridFTPDataMovement gridFTPDataMovement2 = prepareGridFTPDataMovement("222.33.43.445", "23.344.44.400");
+        String dataMovementId2 = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement2);
+        GridFTPDataMovement savedDataMovement2 = computeResourceRepository.getGridFTPDataMovement(dataMovementId2);
+        Assert.assertTrue(EqualsBuilder.reflectionEquals(gridFTPDataMovement2, savedDataMovement2, "__isset_bitfield"));
+    }
+
+    @Test
+    public void fetchNotAvailableResourceTest() throws AppCatalogException {
+        Assert.assertNull(computeResourceRepository.getResourceJobManager("INVALID ID"));
+        Assert.assertNull(computeResourceRepository.getComputeResource("INVALID ID"));
+        Assert.assertNull(computeResourceRepository.getCloudJobSubmission("INVALID ID"));
+        Assert.assertEquals(0, computeResourceRepository.getFileSystems("INVALID ID").size());
+        Assert.assertNull(computeResourceRepository.getGridFTPDataMovement("INVALID ID"));
+        Assert.assertNull(computeResourceRepository.getLocalDataMovement("INVALID ID"));
+        Assert.assertNull(computeResourceRepository.getLocalJobSubmission("INVALID ID"));
+        Assert.assertNull(computeResourceRepository.getSCPDataMovement("INVALID ID"));
+        Assert.assertNull(computeResourceRepository.getUNICOREDataMovement("INVALID ID"));
+    }
+
+    private ComputeResourceDescription prepareComputeResource(String sshSubmissionId, String scpDataMoveId, String gridFTPDataMoveId, int batchQueueCount) {
         ComputeResourceDescription description = new ComputeResourceDescription();
 
         description.setHostName("localhost");
@@ -84,36 +447,15 @@ public class ComputeResourceRepositoryTest {
         ipdaresses.add("222.33.43.444");
         ipdaresses.add("23.344.44.454");
         description.setIpAddresses(ipdaresses);
-        String sshsubmissionId = addSSHJobSubmission();
-
-        // Verify SSHJobSubmission
-        SSHJobSubmission getSSHJobSubmission = computeResourceRepository.getSSHJobSubmission(sshsubmissionId);
-        assertTrue(sshsubmissionId.equals(getSSHJobSubmission.getJobSubmissionInterfaceId()));
-        assertTrue(MonitorMode.POLL_JOB_MANAGER.toString().equals(getSSHJobSubmission.getMonitorMode().toString()));
-        assertTrue(getSSHJobSubmission.getResourceJobManager().getJobManagerCommands().size() == 2);
 
         JobSubmissionInterface sshSubmissionInt = new JobSubmissionInterface();
-        sshSubmissionInt.setJobSubmissionInterfaceId(sshsubmissionId);
+        sshSubmissionInt.setJobSubmissionInterfaceId(sshSubmissionId);
         sshSubmissionInt.setPriorityOrder(1);
         sshSubmissionInt.setJobSubmissionProtocol(JobSubmissionProtocol.SSH);
         List<JobSubmissionInterface> interfaceList = new ArrayList<JobSubmissionInterface>();
         interfaceList.add(sshSubmissionInt);
         description.setJobSubmissionInterfaces(interfaceList);
 
-        // Verify SCP Datamovement
-        String scpDataMoveId = addSCPDataMovement();
-        SCPDataMovement scpDataMovement = computeResourceRepository.getSCPDataMovement(scpDataMoveId);
-        System.out.println("**********SCP Data Move Security protocol ************* : " + scpDataMovement.getSecurityProtocol().toString());
-        assertTrue(scpDataMoveId.equals(scpDataMovement.getDataMovementInterfaceId()));
-        assertTrue(SecurityProtocol.SSH_KEYS.toString().equals(scpDataMovement.getSecurityProtocol().toString()));
-
-        // Verify Grid FTP
-        String gridFTPDataMoveId = addGridFTPDataMovement();
-        GridFTPDataMovement gridFTPDataMovement = computeResourceRepository.getGridFTPDataMovement(gridFTPDataMoveId);
-        System.out.println("**********GRID FTP Data Move Security protocol ************* : " + gridFTPDataMovement.getSecurityProtocol().toString());
-        assertTrue(gridFTPDataMoveId.equals(gridFTPDataMovement.getDataMovementInterfaceId()));
-        assertTrue(gridFTPDataMovement.getSecurityProtocol().toString().equals(SecurityProtocol.SSH_KEYS.toString()));
-
         List<DataMovementInterface> dataMovementInterfaces = new ArrayList<DataMovementInterface>();
         DataMovementInterface scpInterface = new DataMovementInterface();
         scpInterface.setDataMovementInterfaceId(scpDataMoveId);
@@ -130,23 +472,25 @@ public class ComputeResourceRepositoryTest {
 
         description.setDataMovementInterfaces(dataMovementInterfaces);
 
-        BatchQueue batchQueue1 = new BatchQueue();
-        batchQueue1.setQueueName("queue1");
-        batchQueue1.setQueueDescription("que1Desc1");
-        batchQueue1.setMaxRunTime(10);
-        batchQueue1.setMaxNodes(4);
-        batchQueue1.setMaxJobsInQueue(1);
-
-        BatchQueue batchQueue2 = new BatchQueue();
-        batchQueue2.setQueueName("queue2");
-        batchQueue2.setQueueDescription("que1Desc2");
-        batchQueue2.setMaxRunTime(10);
-        batchQueue2.setMaxNodes(4);
-        batchQueue2.setMaxJobsInQueue(1);
-
         List<BatchQueue> batchQueueList = new ArrayList<BatchQueue>();
-        batchQueueList.add(batchQueue1);
-        batchQueueList.add(batchQueue2);
+
+        for (int i = 0; i < batchQueueCount; i++) {
+            BatchQueue batchQueue = new BatchQueue();
+            batchQueue.setQueueName("queue" + i);
+            batchQueue.setQueueDescription("que1Desc" + i);
+            batchQueue.setMaxRunTime(10 + i);
+            batchQueue.setMaxNodes(4 + i);
+            batchQueue.setMaxProcessors(5 + i);
+            batchQueue.setMaxJobsInQueue(i);
+            batchQueue.setMaxMemory(2000 + i);
+            batchQueue.setCpuPerNode(1 + i);
+            batchQueue.setDefaultNodeCount(3 + i);
+            batchQueue.setDefaultCPUCount(15 + i);
+            batchQueue.setDefaultWalltime(2 + i);
+            batchQueue.setQueueSpecificMacros("Macros " + i);
+            batchQueue.setIsDefaultQueue(i == 0);
+            batchQueueList.add(batchQueue);
+        }
         description.setBatchQueues(batchQueueList);
 
         Map<FileSystems, String> fileSysMap = new HashMap<FileSystems, String>();
@@ -154,111 +498,138 @@ public class ComputeResourceRepositoryTest {
         fileSysMap.put(FileSystems.SCRATCH, "/tmp");
         description.setFileSystems(fileSysMap);
 
+        description.setHostAliases(new ArrayList<>());
 
-        // Verify add/update compute resource
-        String resourceId = computeResourceRepository.addComputeResource(description);
-        ComputeResourceDescription host = null;
-
-        if (computeResourceRepository.isComputeResourceExists(resourceId)){
-            host = computeResourceRepository.getComputeResource(resourceId);
-            List<BatchQueue> batchQueues = host.getBatchQueues();
-            // check batch queue size
-            assertTrue(batchQueues.size() == 2);
-            for (BatchQueue queue : batchQueues){
-                System.out.println("%%%%%%%%%%%%%%%% queue description :  %%%%%%%%%%%%%%%%%%% : " + queue.getQueueDescription());
-            }
+        return description;
+    }
 
-            // verify host aliases
-            List<String> hostAliases = host.getHostAliases();
-            assertTrue(hostAliases.size() == 0);
-            if (hostAliases != null && !hostAliases.isEmpty()){
-                for (String alias : hostAliases){
-                    System.out.println("%%%%%%%%%%%%%%%% alias value :  %%%%%%%%%%%%%%%%%%% : " + alias);
-                }
-            }
-            host.addToHostAliases("abc");
-            computeResourceRepository.updateComputeResource(resourceId, host);
-            List<String> hostAliases1 = computeResourceRepository.getComputeResource(resourceId).getHostAliases();
-            assertTrue(hostAliases1.size() == 1);
-            for (String alias : hostAliases1){
-                System.out.println("%%%%%%%%%%%%%%%% alias value :  %%%%%%%%%%%%%%%%%%% : " + alias);
-            }
-            System.out.println("**********Resource name ************* : " +  host.getHostName());
-            assertTrue(host.getHostName().equals("localhost"));
-            assertTrue(host.isGatewayUsageReporting());
-        }
+    private ResourceJobManager prepareResourceJobManager() {
+        ResourceJobManager jobManager = new ResourceJobManager();
+        jobManager.setResourceJobManagerType(ResourceJobManagerType.PBS);
+        jobManager.setPushMonitoringEndpoint("monitor ep");
+        jobManager.setJobManagerBinPath("/bin");
 
-        // Verify updating compute resource
-        description.setHostName("localhost2");
-        computeResourceRepository.updateComputeResource(resourceId, description);
-        if (computeResourceRepository.isComputeResourceExists(resourceId)){
-            host = computeResourceRepository.getComputeResource(resourceId);
-            assertTrue(host.getHostName().equals("localhost2"));
-            System.out.println("**********Updated Resource name ************* : " +  host.getHostName());
-        }
+        Map<ApplicationParallelismType, String> parallelismPrefix =  new HashMap<>();
+        parallelismPrefix.put(ApplicationParallelismType.CCM, "ccm parallel");
+        jobManager.setParallelismPrefix(parallelismPrefix);
 
-        Map<String, String> cfilters = new HashMap<String, String>();
-        cfilters.put(DBConstants.ComputeResource.HOST_NAME, "localhost2");
-        List<ComputeResourceDescription> computeResourceList = computeResourceRepository.getComputeResourceList(cfilters);
-        assertTrue(computeResourceList.size() == 1);
-        System.out.println("**********Size of compute resources ************* : " +  computeResourceList.size());
+        Map<JobManagerCommand, String> commands = new HashMap<JobManagerCommand, String>();
+        commands.put(JobManagerCommand.SUBMISSION, "Sub command");
+        commands.put(JobManagerCommand.SHOW_QUEUE, "show q command");
+        jobManager.setJobManagerCommands(commands);
+        return jobManager;
+    }
 
-        List<ComputeResourceDescription> allComputeResourceList = computeResourceRepository.getAllComputeResourceList();
-        assertTrue(allComputeResourceList.size() == 1);
-        System.out.println("**********Size of all compute resources ************* : " +  allComputeResourceList.size());
+    private UnicoreJobSubmission prepareUnicoreJobSubmission() {
+        UnicoreJobSubmission unicoreJobSubmission = new UnicoreJobSubmission();
+        unicoreJobSubmission.setSecurityProtocol(SecurityProtocol.KERBEROS);
+        unicoreJobSubmission.setUnicoreEndPointURL("http://endpoint");
+        return unicoreJobSubmission;
+    }
 
-        Map<String, String> allComputeResourceIdList = computeResourceRepository.getAllComputeResourceIdList();
-        assertTrue(allComputeResourceIdList.size() == 1);
-        System.out.println("**********Size of all compute resources ids ************* : " +  allComputeResourceIdList.size());
+    private CloudJobSubmission prepareCloudJobSubmission() {
+        CloudJobSubmission cloudJobSubmission = new CloudJobSubmission();
+        cloudJobSubmission.setExecutableType("Executable");
+        cloudJobSubmission.setProviderName(ProviderName.EC2);
+        cloudJobSubmission.setNodeId("ec2 node");
+        cloudJobSubmission.setSecurityProtocol(SecurityProtocol.KERBEROS);
+        cloudJobSubmission.setUserAccountName("user1");
+        return cloudJobSubmission;
+    }
 
-        assertTrue("Compute resource save successfully", host != null);
+    private LOCALSubmission prepareLocalJobSubmission(ResourceJobManager jobManager) {
+        LOCALSubmission localSubmission = new LOCALSubmission();
+        localSubmission.setResourceJobManager(jobManager);
+        localSubmission.setSecurityProtocol(SecurityProtocol.KERBEROS);
+        return localSubmission;
     }
 
-    public String addSSHJobSubmission() throws AppCatalogException {
+    private SSHJobSubmission prepareSSHJobSubmission(ResourceJobManager jobManager) {
         SSHJobSubmission jobSubmission = new SSHJobSubmission();
         jobSubmission.setSshPort(22);
         jobSubmission.setSecurityProtocol(SecurityProtocol.GSI);
-        ResourceJobManager jobManager = new ResourceJobManager();
-        jobManager.setResourceJobManagerType(ResourceJobManagerType.PBS);
-        jobManager.setPushMonitoringEndpoint("monitor ep");
-        jobManager.setJobManagerBinPath("/bin");
-        Map<JobManagerCommand, String> commands = new HashMap<JobManagerCommand, String>();
-        commands.put(JobManagerCommand.SUBMISSION, "Sub command");
-        commands.put(JobManagerCommand.SHOW_QUEUE, "show q command");
-        jobManager.setJobManagerCommands(commands);
-        String jobManagerID = computeResourceRepository.addResourceJobManager(jobManager);
-        jobManager.setResourceJobManagerId(jobManagerID);
         jobSubmission.setMonitorMode(MonitorMode.POLL_JOB_MANAGER);
         jobSubmission.setResourceJobManager(jobManager);
+        return jobSubmission;
+    }
 
-        return computeResourceRepository.addSSHJobSubmission(jobSubmission);
+    private LOCALDataMovement prepareLocalDataMovement() {
+        return new LOCALDataMovement();
     }
 
-    public String addSCPDataMovement (){
-        try {
-            SCPDataMovement dataMovement = new SCPDataMovement();
-            dataMovement.setSshPort(22);
-            dataMovement.setSecurityProtocol(SecurityProtocol.SSH_KEYS);
-            return computeResourceRepository.addScpDataMovement(dataMovement);
-        }catch (AppCatalogException e) {
-            logger.error(e.getMessage(), e);
-        }
-        return null;
+    private SCPDataMovement prepareScpDataMovement() {
+        SCPDataMovement dataMovement = new SCPDataMovement();
+        dataMovement.setSshPort(22);
+        dataMovement.setSecurityProtocol(SecurityProtocol.SSH_KEYS);
+        return dataMovement;
     }
 
-    public String addGridFTPDataMovement (){
-        try {
-            GridFTPDataMovement dataMovement = new GridFTPDataMovement();
-            dataMovement.setSecurityProtocol(SecurityProtocol.SSH_KEYS);
-            List<String> endPoints = new ArrayList<String>();
-            endPoints.add("222.33.43.444");
-            endPoints.add("23.344.44.454");
-            dataMovement.setGridFTPEndPoints(endPoints);
-            return computeResourceRepository.addGridFTPDataMovement(dataMovement);
-        }catch (AppCatalogException e) {
-            logger.error(e.getMessage(), e);
-        }
-        return null;
+    private UnicoreDataMovement prepareUnicoreDataMovement() {
+        UnicoreDataMovement dataMovement = new UnicoreDataMovement();
+        dataMovement.setSecurityProtocol(SecurityProtocol.KERBEROS);
+        dataMovement.setUnicoreEndPointURL("http://endpoint");
+        return dataMovement;
     }
 
+    private GridFTPDataMovement prepareGridFTPDataMovement(String... endpoints) {
+        GridFTPDataMovement dataMovement = new GridFTPDataMovement();
+        dataMovement.setSecurityProtocol(SecurityProtocol.SSH_KEYS);
+        // Copy the supplied endpoint values into the data movement
+        List<String> endPointList = new ArrayList<>();
+        for (String endpoint : endpoints) {
+            endPointList.add(endpoint);
+        }
+        dataMovement.setGridFTPEndPoints(endPointList);
+        return dataMovement;
+    }
+
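+    /**
+     * Deep-compares two ComputeResourceDescription instances: scalar fields are
+     * compared reflectively (ignoring Thrift's internal __isset_bitfield) and the
+     * list-valued fields are compared element-wise without regard to order.
+     */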
+    private boolean deepCompareComputeResourceDescription(ComputeResourceDescription expected, ComputeResourceDescription actual) {
+        boolean equals = EqualsBuilder.reflectionEquals(expected, actual,
+                "__isset_bitfield", "batchQueues", "fileSystems", "jobSubmissionInterfaces", "dataMovementInterfaces", "ipAddresses", "hostAliases");
+
+        equals = equals & deepCompareArrayList(expected.getBatchQueues(), actual.getBatchQueues(), false);
+        equals = equals & deepCompareArrayList(expected.getJobSubmissionInterfaces(), actual.getJobSubmissionInterfaces(), false);
+        equals = equals & deepCompareArrayList(expected.getDataMovementInterfaces(), actual.getDataMovementInterfaces(), false);
+        equals = equals & deepCompareArrayList(expected.getIpAddresses(), actual.getIpAddresses(), false);
+        equals = equals & deepCompareArrayList(expected.getHostAliases(), actual.getHostAliases(), false);
+        return equals;
+    }
+
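+    /**
+     * Deep-compares two lists using reflective field comparison for the elements.
+     * With preferOrder the elements are compared positionally; otherwise each
+     * expected element is matched against any not-yet-matched actual element.
+     */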
+    private boolean deepCompareArrayList(List expected, List actual, boolean preferOrder) {
+        if (expected == null || actual == null) {
+            // Equal only if both lists are null
+            return expected == actual;
+        }
+
+        if (expected.size() != actual.size()) {
+            return false;
+        }
+
+        if (preferOrder) {
+            for (int i = 0; i < expected.size(); i++) {
+                if (!EqualsBuilder.reflectionEquals(expected.get(i), actual.get(i), "__isset_bitfield")) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        // Greedily match each expected element against a not-yet-matched actual element
+        boolean[] checked = new boolean[actual.size()];
+        for (int i = 0; i < expected.size(); i++) {
+            boolean matched = false;
+            for (int j = 0; j < actual.size(); j++) {
+                if (checked[j]) {
+                    continue;
+                }
+                if (EqualsBuilder.reflectionEquals(expected.get(i), actual.get(j), "__isset_bitfield")) {
+                    checked[j] = true;
+                    matched = true;
+                    break;
+                }
+            }
+            if (!matched) {
+                return false;
+            }
+        }
+        return true;
+    }
 }
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayGroupsRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayGroupsRepositoryTest.java
index 74355c9..9e2e378 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayGroupsRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayGroupsRepositoryTest.java
@@ -21,40 +21,25 @@
 package org.apache.airavata.registry.core.repositories.appcatalog;
 
 import org.apache.airavata.model.appcatalog.gatewaygroups.GatewayGroups;
-import org.apache.airavata.registry.core.repositories.appcatalog.util.Initialize;
-import org.junit.After;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.junit.Assert;
-import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class GatewayGroupsRepositoryTest {
+public class GatewayGroupsRepositoryTest extends TestBase {
 
     private static final String GATEWAY_ID = "gateway-id";
     private static final String ADMIN_GROUPS_ID = "admin-groups-id";
     private static final String READ_ONLY_ADMINS_GROUP_ID = "read-only-admins-group-id";
     private static final String DEFAULT_GATEWAY_USERS_GROUP_ID = "default-gateway-users-group-id";
 
-    private static Initialize initialize;
     private GatewayGroupsRepository gatewayGroupsRepository;
     private static final Logger logger = LoggerFactory.getLogger(GatewayProfileRepositoryTest.class);
 
-    @Before
-    public void setUp() {
-        try {
-            initialize = new Initialize("appcatalog-derby.sql");
-            initialize.initializeDB();
-            gatewayGroupsRepository = new GatewayGroupsRepository();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        initialize.stopDerbyServer();
+    public GatewayGroupsRepositoryTest() {
+        super(Database.APP_CATALOG);
+        gatewayGroupsRepository = new GatewayGroupsRepository();
     }
 
     @Test
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayProfileRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayProfileRepositoryTest.java
index 6547c85..99e90fe 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayProfileRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayProfileRepositoryTest.java
@@ -24,10 +24,8 @@ import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtoco
 import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
 import org.apache.airavata.model.data.movement.DataMovementProtocol;
-import org.apache.airavata.registry.core.repositories.appcatalog.util.Initialize;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,27 +37,15 @@ import java.util.Map;
 
 import static org.junit.Assert.assertTrue;
 
-public class GatewayProfileRepositoryTest {
+public class GatewayProfileRepositoryTest extends TestBase {
 
-    private static Initialize initialize;
-    private GwyResourceProfileRepository gwyResourceProfileRepository;
     private static final Logger logger = LoggerFactory.getLogger(GatewayProfileRepositoryTest.class);
 
-    @Before
-    public void setUp() {
-        try {
-            initialize = new Initialize("appcatalog-derby.sql");
-            initialize.initializeDB();
-            gwyResourceProfileRepository = new GwyResourceProfileRepository();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
+    private GwyResourceProfileRepository gwyResourceProfileRepository;
 
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        initialize.stopDerbyServer();
+    public GatewayProfileRepositoryTest() {
+        super(Database.APP_CATALOG);
+        gwyResourceProfileRepository = new GwyResourceProfileRepository();
     }
 
     @Test
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GroupResourceProfileRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GroupResourceProfileRepositoryTest.java
index 49dd277..80cff5d 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GroupResourceProfileRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GroupResourceProfileRepositoryTest.java
@@ -22,10 +22,8 @@ package org.apache.airavata.registry.core.repositories.appcatalog;
 import org.apache.airavata.model.appcatalog.computeresource.BatchQueue;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.appcatalog.groupresourceprofile.*;
-import org.apache.airavata.registry.core.repositories.appcatalog.util.Initialize;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,32 +33,19 @@ import java.util.List;
 
 import static org.junit.Assert.assertTrue;
 
-public class GroupResourceProfileRepositoryTest {
+public class GroupResourceProfileRepositoryTest extends TestBase {
+
+    private static final Logger logger = LoggerFactory.getLogger(GroupResourceProfileRepositoryTest.class);
 
-    private static Initialize initialize;
     private ComputeResourceRepository computeResourceRepository;
     private GroupResourceProfileRepository groupResourceProfileRepository;
     private String gatewayId = "TEST_GATEWAY";
     private String groupResourceProfileId = null;
-    private static final Logger logger = LoggerFactory.getLogger(ComputeResourceRepository.class);
 
-    @Before
-    public void setUp() {
-        try {
-            initialize = new Initialize("appcatalog-derby.sql");
-            initialize.initializeDB();
-            computeResourceRepository = new ComputeResourceRepository();
-            groupResourceProfileRepository = new GroupResourceProfileRepository();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        groupResourceProfileRepository.removeGroupResourceProfile(groupResourceProfileId);
-        initialize.stopDerbyServer();
+    public GroupResourceProfileRepositoryTest() {
+        super(Database.APP_CATALOG);
+        computeResourceRepository = new ComputeResourceRepository();
+        groupResourceProfileRepository = new GroupResourceProfileRepository();
     }
 
     @Test
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/StorageResourceRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/StorageResourceRepositoryTest.java
index 17d1948..9de8702 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/StorageResourceRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/StorageResourceRepositoryTest.java
@@ -25,10 +25,8 @@ import org.apache.airavata.model.data.movement.DataMovementProtocol;
 import org.apache.airavata.model.data.movement.GridFTPDataMovement;
 import org.apache.airavata.model.data.movement.SCPDataMovement;
 import org.apache.airavata.model.data.movement.SecurityProtocol;
-import org.apache.airavata.registry.core.repositories.appcatalog.util.Initialize;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,27 +40,15 @@ import static org.junit.Assert.fail;
 /**
  * Created by skariyat on 3/13/18.
  */
-public class StorageResourceRepositoryTest {
+public class StorageResourceRepositoryTest extends TestBase {
 
-    private static Initialize initialize;
-    private StorageResourceRepository storageResourceRepository;
     private static final Logger logger = LoggerFactory.getLogger(StorageResourceRepository.class);
 
-    @Before
-    public void setUp() {
-        try {
-            initialize = new Initialize("appcatalog-derby.sql");
-            initialize.initializeDB();
-            storageResourceRepository = new StorageResourceRepository();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
+    private StorageResourceRepository storageResourceRepository;
 
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        initialize.stopDerbyServer();
+    public StorageResourceRepositoryTest() {
+        super(Database.APP_CATALOG);
+        storageResourceRepository = new StorageResourceRepository();
     }
 
     @Test
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/UserResourceProfileRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/UserResourceProfileRepositoryTest.java
index d97c80e..470fb0d 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/UserResourceProfileRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/UserResourceProfileRepositoryTest.java
@@ -3,10 +3,8 @@ package org.apache.airavata.registry.core.repositories.appcatalog;
 import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
 import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
 import org.apache.airavata.model.appcatalog.userresourceprofile.UserStoragePreference;
-import org.apache.airavata.registry.core.repositories.appcatalog.util.Initialize;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -16,29 +14,17 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.*;
 
-public class UserResourceProfileRepositoryTest {
+public class UserResourceProfileRepositoryTest extends TestBase {
+
+    private static final Logger logger = LoggerFactory.getLogger(UserResourceProfileRepositoryTest.class);
 
-    private static Initialize initialize;
     private UserResourceProfileRepository userResourceProfileRepository;
     private String userId = "testUser";
     private String gatewayId = "testGateway";
-    private static final Logger logger = LoggerFactory.getLogger(UserResourceProfileRepositoryTest.class);
-
-    @Before
-    public void setUp() {
-        try {
-            initialize = new Initialize("appcatalog-derby.sql");
-            initialize.initializeDB();
-            userResourceProfileRepository = new UserResourceProfileRepository();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
 
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        initialize.stopDerbyServer();
+    public UserResourceProfileRepositoryTest() {
+        super(Database.APP_CATALOG);
+        userResourceProfileRepository = new UserResourceProfileRepository();
     }
 
     @Test
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/util/Initialize.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/util/Initialize.java
deleted file mode 100644
index 88d2815..0000000
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/util/Initialize.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.airavata.registry.core.repositories.appcatalog.util;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.derby.drda.NetworkServerControl;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.net.InetAddress;
-import java.net.URI;
-import java.sql.*;
-import java.util.StringTokenizer;
-
-public class Initialize {
-    private static final Logger logger = LoggerFactory.getLogger(Initialize.class);
-    public static final String DERBY_SERVER_MODE_SYS_PROPERTY = "derby.drda.startNetworkServer";
-    public  String scriptName = "appcatalog-derby.sql";
-    private NetworkServerControl server;
-    private static final String delimiter = ";";
-    public static final String COMPUTE_RESOURCE_TABLE = "COMPUTE_RESOURCE";
-    private String jdbcUrl = null;
-    private String jdbcDriver = null;
-    private String jdbcUser = null;
-    private String jdbcPassword = null;
-
-    public Initialize(String scriptName) {
-        this.scriptName = scriptName;
-    }
-
-    public static boolean checkStringBufferEndsWith(StringBuffer buffer, String suffix) {
-        if (suffix.length() > buffer.length()) {
-            return false;
-        }
-        // this loop is done on purpose to avoid memory allocation performance
-        // problems on various JDKs
-        // StringBuffer.lastIndexOf() was introduced in jdk 1.4 and
-        // implementation is ok though does allocation/copying
-        // StringBuffer.toString().endsWith() does massive memory
-        // allocation/copying on JDK 1.5
-        // See http://issues.apache.org/bugzilla/show_bug.cgi?id=37169
-        int endIndex = suffix.length() - 1;
-        int bufferIndex = buffer.length() - 1;
-        while (endIndex >= 0) {
-            if (buffer.charAt(bufferIndex) != suffix.charAt(endIndex)) {
-                return false;
-            }
-            bufferIndex--;
-            endIndex--;
-        }
-        return true;
-    }
-
-    private static boolean isServerStarted(NetworkServerControl server, int ntries)
-    {
-        for (int i = 1; i <= ntries; i ++)
-        {
-            try {
-                Thread.sleep(500);
-                server.ping();
-                return true;
-            }
-            catch (Exception e) {
-                if (i == ntries)
-                    return false;
-            }
-        }
-        return false;
-    }
-
-    public void initializeDB() {
-
-        try{
-            jdbcDriver = ServerSettings.getSetting("appcatalog.jdbc.driver");
-            jdbcUrl = ServerSettings.getSetting("appcatalog.jdbc.url");
-            jdbcUser = ServerSettings.getSetting("appcatalog.jdbc.user");
-            jdbcPassword = ServerSettings.getSetting("appcatalog.jdbc.password");
-            jdbcUrl = jdbcUrl + "?" + "user=" + jdbcUser + "&" + "password=" + jdbcPassword;
-        } catch (ApplicationSettingsException e) {
-            logger.error("Unable to read properties", e);
-        }
-
-        startDerbyInServerMode();
-        if(!isServerStarted(server, 20)){
-           throw new RuntimeException("Derby server cound not started within five seconds...");
-        }
-//      startDerbyInEmbeddedMode();
-
-        Connection conn = null;
-        try {
-            Class.forName(jdbcDriver).newInstance();
-            conn = DriverManager.getConnection(jdbcUrl, jdbcUser, jdbcPassword);
-            if (!isDatabaseStructureCreated(COMPUTE_RESOURCE_TABLE, conn)) {
-                executeSQLScript(conn);
-                logger.info("New Database created for App Catalog !!!");
-            } else {
-                logger.debug("Database already created for App Catalog!");
-            }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            throw new RuntimeException("Database failure", e);
-        } finally {
-            try {
-                if (conn != null){
-                    if (!conn.getAutoCommit()) {
-                        conn.commit();
-                    }
-                    conn.close();
-                }
-            } catch (SQLException e) {
-                logger.error(e.getMessage(), e);
-            }
-        }
-    }
-
-    public static boolean isDatabaseStructureCreated(String tableName, Connection conn) {
-        try {
-            System.out.println("Running a query to test the database tables existence.");
-            // check whether the tables are already created with a query
-            Statement statement = null;
-            try {
-                statement = conn.createStatement();
-                ResultSet rs = statement.executeQuery("select * from " + tableName);
-                if (rs != null) {
-                    rs.close();
-                }
-            } finally {
-                try {
-                    if (statement != null) {
-                        statement.close();
-                    }
-                } catch (SQLException e) {
-                    return false;
-                }
-            }
-        } catch (SQLException e) {
-            return false;
-        }
-
-        return true;
-    }
-
-    private void executeSQLScript(Connection conn) throws Exception {
-        StringBuffer sql = new StringBuffer();
-        BufferedReader reader = null;
-        try{
-
-        InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream(scriptName);
-        reader = new BufferedReader(new InputStreamReader(inputStream));
-        String line;
-        while ((line = reader.readLine()) != null) {
-            line = line.trim();
-            if (line.startsWith("//")) {
-                continue;
-            }
-            if (line.startsWith("--")) {
-                continue;
-            }
-            StringTokenizer st = new StringTokenizer(line);
-            if (st.hasMoreTokens()) {
-                String token = st.nextToken();
-                if ("REM".equalsIgnoreCase(token)) {
-                    continue;
-                }
-            }
-            sql.append(" ").append(line);
-
-            // SQL defines "--" as a comment to EOL
-            // and in Oracle it may contain a hint
-            // so we cannot just remove it, instead we must end it
-            if (line.indexOf("--") >= 0) {
-                sql.append("\n");
-            }
-            if ((checkStringBufferEndsWith(sql, delimiter))) {
-                executeSQL(sql.substring(0, sql.length() - delimiter.length()), conn);
-                sql.replace(0, sql.length(), "");
-            }
-        }
-        // Catch any statements not followed by ;
-        if (sql.length() > 0) {
-            executeSQL(sql.toString(), conn);
-        }
-        }catch (IOException e){
-            logger.error("Error occurred while executing SQL script for creating Airavata database", e);
-            throw new Exception("Error occurred while executing SQL script for creating Airavata database", e);
-        }finally {
-            if (reader != null) {
-                reader.close();
-            }
-
-        }
-
-    }
-
-    private static void executeSQL(String sql, Connection conn) throws Exception {
-        // Check and ignore empty statements
-        if ("".equals(sql.trim())) {
-            return;
-        }
-
-        Statement statement = null;
-        try {
-            logger.debug("SQL : " + sql);
-
-            boolean ret;
-            int updateCount = 0, updateCountTotal = 0;
-            statement = conn.createStatement();
-            ret = statement.execute(sql);
-            updateCount = statement.getUpdateCount();
-            do {
-                if (!ret) {
-                    if (updateCount != -1) {
-                        updateCountTotal += updateCount;
-                    }
-                }
-                ret = statement.getMoreResults();
-                if (ret) {
-                    updateCount = statement.getUpdateCount();
-                }
-            } while (ret);
-
-            logger.debug(sql + " : " + updateCountTotal + " rows affected");
-
-            SQLWarning warning = conn.getWarnings();
-            while (warning != null) {
-                logger.warn(warning + " sql warning");
-                warning = warning.getNextWarning();
-            }
-            conn.clearWarnings();
-        } catch (SQLException e) {
-            if (e.getSQLState().equals("X0Y32")) {
-                // eliminating the table already exception for the derby
-                // database
-                logger.info("Table Already Exists", e);
-            } else {
-                throw new Exception("Error occurred while executing : " + sql, e);
-            }
-        } finally {
-            if (statement != null) {
-                try {
-                    statement.close();
-                } catch (SQLException e) {
-                    logger.error("Error occurred while closing result set.", e);
-                }
-            }
-        }
-    }
-
-    private void startDerbyInServerMode() {
-        try {
-            System.setProperty(DERBY_SERVER_MODE_SYS_PROPERTY, "true");
-            server = new NetworkServerControl(InetAddress.getByName("0.0.0.0"),
-                    20000,
-                    jdbcUser, jdbcPassword);
-            java.io.PrintWriter consoleWriter = new java.io.PrintWriter(System.out, true);
-            server.start(consoleWriter);
-        } catch (IOException e) {
-            logger.error("Unable to start Apache derby in the server mode! Check whether " +
-                    "specified port is available");
-        } catch (Exception e) {
-            logger.error("Unable to start Apache derby in the server mode! Check whether " +
-                    "specified port is available");
-        }
-
-    }
-
-    public static int getPort(String jdbcURL){
-        try{
-            String cleanURI = jdbcURL.substring(5);
-            URI uri = URI.create(cleanURI);
-            return uri.getPort();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-            return -1;
-        }
-    }
-
-    private void startDerbyInEmbeddedMode(){
-        try {
-            Class.forName("org.apache.derby.jdbc.EmbeddedDriver");
-            DriverManager.getConnection("jdbc:derby:memory:unit-testing-jpa;create=true").close();
-        } catch (ClassNotFoundException e) {
-            logger.error(e.getMessage(), e);
-        } catch (SQLException e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-
-    public void stopDerbyServer() {
-        try {
-            server.shutdown();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
-}
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/DerbyDBManager.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/DerbyDBManager.java
new file mode 100644
index 0000000..d04b2a6
--- /dev/null
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/DerbyDBManager.java
@@ -0,0 +1,239 @@
+package org.apache.airavata.registry.core.repositories.common;
+
+import com.ibatis.common.jdbc.ScriptRunner;
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.derby.drda.NetworkServerControl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.net.InetAddress;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Most of the code in this class was influenced by
+ * http://svn.apache.org/viewvc/db/derby/code/trunk/java/testing/org/apache/derbyTesting/junit/CleanDatabaseTestSetup.java?view=markup
+ */
+public class DerbyDBManager {
+
+    private static final Logger logger = LoggerFactory.getLogger(DerbyDBManager.class);
+
+    private static final String DERBY_SERVER_MODE_SYS_PROPERTY = "derby.drda.startNetworkServer";
+
+    private static final String[] CLEAR_DB_PROPERTIES = {"derby.database.classpath",};
+
+    private NetworkServerControl server;
+    private String jdbcDriver = null;
+    private String jdbcUser = null;
+    private String jdbcPassword = null;
+
+
+    public DerbyDBManager() {
+
+        try {
+            jdbcDriver = ServerSettings.getSetting("appcatalog.jdbc.driver");
+            jdbcUser = ServerSettings.getSetting("appcatalog.jdbc.user");
+            jdbcPassword = ServerSettings.getSetting("appcatalog.jdbc.password");
+        } catch (ApplicationSettingsException e) {
+            logger.error("Unable to read properties", e);
+        }
+    }
+
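+    /**
+     * Starts a Derby network server on port 20000 with the credentials read
+     * from the server settings so that tests can connect over JDBC.
+     */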
+    public void startDatabaseServer() {
+        try {
+            System.setProperty(DERBY_SERVER_MODE_SYS_PROPERTY, "true");
+            server = new NetworkServerControl(InetAddress.getByName("0.0.0.0"),
+                    20000,
+                    jdbcUser, jdbcPassword);
+            java.io.PrintWriter consoleWriter = new java.io.PrintWriter(System.out, true);
+            server.start(consoleWriter);
+        } catch (Exception e) {
+            logger.error("Unable to start Apache Derby in server mode! Check whether the " +
+                    "specified port is available", e);
+        }
+    }
+
+    public void stopDatabaseServer() {
+        try {
+            server.shutdown();
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
+
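+    /**
+     * Creates the database (if it does not already exist) and populates it by
+     * running the given SQL script through the iBATIS ScriptRunner.
+     */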
+    public void initializeDatabase(String databaseName, String initFile) {
+
+        String jdbcUrl = "jdbc:derby:" + databaseName + ";create=true;user=" + jdbcUser + ";password=" + jdbcPassword;
+
+        Connection conn = null;
+        try {
+            Class.forName(jdbcDriver).newInstance();
+            conn = DriverManager.getConnection(jdbcUrl, jdbcUser, jdbcPassword);
+            ScriptRunner scriptRunner = new ScriptRunner(conn, false, false);
+            Reader reader = new BufferedReader(new FileReader(initFile));
+            scriptRunner.runScript(reader);
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException("Database failure", e);
+        } finally {
+            try {
+                if (conn != null) {
+                    if (!conn.getAutoCommit()) {
+                        conn.commit();
+                    }
+                    conn.close();
+                }
+            } catch (SQLException e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+    }
+
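+    /**
+     * Returns the database to a pristine state by dropping all user schemas,
+     * roles and users, following Derby's CleanDatabaseTestSetup approach.
+     */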
+    public void destroyDatabase(String databaseName) {
+
+        String jdbcUrl = "jdbc:derby:" + databaseName + ";create=true;user=" + jdbcUser + ";password=" + jdbcPassword;
+
+        Connection conn = null;
+        try {
+            Class.forName(jdbcDriver).newInstance();
+            conn = DriverManager.getConnection(jdbcUrl, jdbcUser, jdbcPassword);
+            conn.setAutoCommit(false);
+            clearProperties(conn);
+            removeObjects(conn);
+            removeRoles(conn);
+            removeUsers(conn);
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+            throw new RuntimeException("Database failure", e);
+        } finally {
+            try {
+                if (conn != null) {
+                    if (!conn.getAutoCommit()) {
+                        conn.commit();
+                    }
+                    conn.close();
+                }
+            } catch (SQLException e) {
+                logger.error(e.getMessage(), e);
+            }
+        }
+    }
+
+
+    private static void clearProperties(Connection conn) throws SQLException {
+        PreparedStatement ps = conn.prepareCall(
+                "CALL SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(?, NULL)");
+
+        for (String CLEAR_DB_PROPERTY : CLEAR_DB_PROPERTIES) {
+            ps.setString(1, CLEAR_DB_PROPERTY);
+            ps.executeUpdate();
+        }
+        ps.close();
+        conn.commit();
+    }
+
+    private static void removeObjects(Connection conn) throws SQLException {
+
+        DatabaseMetaData dmd = conn.getMetaData();
+
+        SQLException sqle = null;
+        // Loop a number of arbitrary times to catch cases
+        // where objects are dependent on objects in
+        // different schemas.
+        for (int count = 0; count < 5; count++) {
+            // Fetch all the user schemas into a list
+            List<String> schemas = new ArrayList<>();
+            ResultSet rs = dmd.getSchemas();
+            while (rs.next()) {
+
+                String schema = rs.getString("TABLE_SCHEM");
+                if (schema.startsWith("SYS"))
+                    continue;
+                if (schema.equals("SQLJ"))
+                    continue;
+                if (schema.equals("NULLID"))
+                    continue;
+
+                schemas.add(schema);
+            }
+            rs.close();
+
+            // DROP all the user schemas.
+            sqle = null;
+            for (String schema : schemas) {
+                try {
+                    JdbcUtil.dropSchema(dmd, schema);
+                } catch (SQLException e) {
+                    sqle = e;
+                }
+            }
+            // No errors means all the schemas we wanted to
+            // drop were dropped, so nothing more to do.
+            if (sqle == null)
+                return;
+        }
+        throw sqle;
+    }
+
+    private static void removeRoles(Connection conn) throws SQLException {
+        // No metadata for roles, so do a query against SYSROLES
+        Statement stm = conn.createStatement();
+        Statement dropStm = conn.createStatement();
+
+        // cast to overcome territory differences in some cases:
+        ResultSet rs = stm.executeQuery(
+                "select roleid from sys.sysroles where " +
+                        "cast(isdef as char(1)) = 'Y'");
+
+        while (rs.next()) {
+            dropStm.executeUpdate("DROP ROLE " + JdbcUtil.escape(rs.getString(1)));
+        }
+
+        stm.close();
+        dropStm.close();
+        conn.commit();
+    }
+
+    private static void removeUsers(Connection conn) throws SQLException {
+        // Get the users
+        Statement stm = conn.createStatement();
+        ResultSet rs = stm.executeQuery("select username from sys.sysusers");
+        ArrayList<String> users = new ArrayList<String>();
+
+        while (rs.next()) {
+            users.add(rs.getString(1));
+        }
+        rs.close();
+        stm.close();
+
+        // Now delete them
+        PreparedStatement ps = conn.prepareStatement("call syscs_util.syscs_drop_user( ? )");
+
+        for (int i = 0; i < users.size(); i++) {
+            ps.setString(1, users.get(i));
+
+            // you can't drop the DBO's credentials. sorry.
+            try {
+                ps.executeUpdate();
+            } catch (SQLException se) {
+                if ("4251F".equals(se.getSQLState())) {
+                    continue;
+                } else {
+                    throw se;
+                }
+            }
+        }
+
+        ps.close();
+        conn.commit();
+    }
+}
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/JdbcUtil.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/JdbcUtil.java
new file mode 100644
index 0000000..d253bfa
--- /dev/null
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/JdbcUtil.java
@@ -0,0 +1,357 @@
+/*
+ *
+ * Derby - Class org.apache.derbyTesting.junit.JDBC
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific
+ * language governing permissions and limitations under the License.
+ */
+package org.apache.airavata.registry.core.repositories.common;
+
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.ListIterator;
+
+import org.junit.Assert;
+
+/**
+ * JDBC utility methods for the JUnit tests.
+ * Note that JSR 169 is a subset of JDBC 3 and
+ * JDBC 3 is a subset of JDBC 4.
+ * The base level for the Derby tests is JSR 169.
+ *
+ * Borrowed from http://svn.apache.org/viewvc/db/derby/code/trunk/java/testing/org/apache/derbyTesting/junit/JDBC.java?view=markup
+ */
+public class JdbcUtil {
+
+    /**
+     * Constant to pass to DatabaseMetaData.getTables() to fetch
+     * just synonyms.
+     */
+    public static final String[] GET_TABLES_SYNONYM =
+            new String[]{"SYNONYM"};
+
+    /**
+     * Constant to pass to DatabaseMetaData.getTables() to fetch
+     * just views.
+     */
+    public static final String[] GET_TABLES_VIEW = new String[]{"VIEW"};
+
+    /**
+     * Constant to pass to DatabaseMetaData.getTables() to fetch
+     * just tables.
+     */
+    public static final String[] GET_TABLES_TABLE = new String[]{"TABLE"};
+
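+    /**
+     * Escape an identifier for use in SQL by wrapping it in double quotes and
+     * doubling any embedded double-quote characters.
+     */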
+    public static String escape(String name) {
+        StringBuffer buffer = new StringBuffer(name.length() + 2);
+        buffer.append('"');
+        for (int i = 0; i < name.length(); i++) {
+            char c = name.charAt(i);
+            // escape double quote characters with an extra double quote
+            if (c == '"') buffer.append('"');
+            buffer.append(c);
+        }
+        buffer.append('"');
+        return buffer.toString();
+    }
+
+    /**
+     * Escape a schema-qualified name so that it is suitable
+     * for use in a SQL query executed over JDBC.
+     */
+    public static String escape(String schema, String name) {
+        return escape(schema) + "." + escape(name);
+    }
+
+    /**
+     * Drop a database schema by dropping all objects in it
+     * and then executing DROP SCHEMA. If the schema is
+     * APP it is cleaned but DROP SCHEMA is not executed.
+     * <p>
+     * TODO: Handle dependencies by looping in some intelligent
+     * way until everything can be dropped.
+     *
+     * @param dmd    DatabaseMetaData object for database
+     * @param schema Name of the schema
+     * @throws SQLException database error
+     */
+    public static void dropSchema(DatabaseMetaData dmd, String schema) throws SQLException {
+        Connection conn = dmd.getConnection();
+        Assert.assertFalse(conn.getAutoCommit());
+        Statement s = dmd.getConnection().createStatement();
+
+        // Triggers
+        PreparedStatement pstr = conn.prepareStatement(
+                "SELECT TRIGGERNAME FROM SYS.SYSSCHEMAS S, SYS.SYSTRIGGERS T "
+                        + "WHERE S.SCHEMAID = T.SCHEMAID AND SCHEMANAME = ?");
+        pstr.setString(1, schema);
+        ResultSet trrs = pstr.executeQuery();
+        while (trrs.next()) {
+            String trigger = trrs.getString(1);
+            s.execute("DROP TRIGGER " + JdbcUtil.escape(schema, trigger));
+        }
+        trrs.close();
+        pstr.close();
+
+        // Functions - not supported by JDBC meta data until JDBC 4
+        // Need to use the CHAR() function on A.ALIASTYPE
+        // so that the compare will work in any schema.
+        PreparedStatement psf = conn.prepareStatement(
+                "SELECT ALIAS FROM SYS.SYSALIASES A, SYS.SYSSCHEMAS S" +
+                        " WHERE A.SCHEMAID = S.SCHEMAID " +
+                        " AND CHAR(A.ALIASTYPE) = ? " +
+                        " AND S.SCHEMANAME = ?");
+        psf.setString(1, "F");
+        psf.setString(2, schema);
+        ResultSet rs = psf.executeQuery();
+        dropUsingDMD(s, rs, schema, "ALIAS", "FUNCTION");
+
+        // Procedures
+        rs = dmd.getProcedures((String) null,
+                schema, (String) null);
+
+        dropUsingDMD(s, rs, schema, "PROCEDURE_NAME", "PROCEDURE");
+
+        // Views
+        rs = dmd.getTables((String) null, schema, (String) null,
+                GET_TABLES_VIEW);
+
+        dropUsingDMD(s, rs, schema, "TABLE_NAME", "VIEW");
+
+        // Tables
+        rs = dmd.getTables((String) null, schema, (String) null,
+                GET_TABLES_TABLE);
+
+        dropUsingDMD(s, rs, schema, "TABLE_NAME", "TABLE");
+
+        // At this point there may be tables left due to
+        // foreign key constraints leading to a dependency loop.
+        // Drop any constraints that remain and then drop the tables.
+        // If there are no tables then this should be a quick no-op.
+        ResultSet table_rs = dmd.getTables((String) null, schema, (String) null,
+                GET_TABLES_TABLE);
+
+        while (table_rs.next()) {
+            String tablename = table_rs.getString("TABLE_NAME");
+            rs = dmd.getExportedKeys((String) null, schema, tablename);
+            while (rs.next()) {
+                short keyPosition = rs.getShort("KEY_SEQ");
+                if (keyPosition != 1)
+                    continue;
+                String fkName = rs.getString("FK_NAME");
+                // No name, probably can't happen but couldn't drop it anyway.
+                if (fkName == null)
+                    continue;
+                String fkSchema = rs.getString("FKTABLE_SCHEM");
+                String fkTable = rs.getString("FKTABLE_NAME");
+
+                String ddl = "ALTER TABLE " +
+                        JdbcUtil.escape(fkSchema, fkTable) +
+                        " DROP FOREIGN KEY " +
+                        JdbcUtil.escape(fkName);
+                s.executeUpdate(ddl);
+            }
+            rs.close();
+        }
+        table_rs.close();
+        conn.commit();
+
+        // Tables (again)
+        rs = dmd.getTables((String) null, schema, (String) null,
+                GET_TABLES_TABLE);
+        dropUsingDMD(s, rs, schema, "TABLE_NAME", "TABLE");
+
+        // drop UDTs
+        psf.setString(1, "A");
+        psf.setString(2, schema);
+        rs = psf.executeQuery();
+        dropUsingDMD(s, rs, schema, "ALIAS", "TYPE");
+
+        // drop aggregates
+        psf.setString(1, "G");
+        psf.setString(2, schema);
+        rs = psf.executeQuery();
+        dropUsingDMD(s, rs, schema, "ALIAS", "DERBY AGGREGATE");
+        psf.close();
+
+        // Synonyms - need work around for DERBY-1790 where
+        // passing a table type of SYNONYM fails.
+        rs = dmd.getTables((String) null, schema, (String) null,
+                GET_TABLES_SYNONYM);
+
+        dropUsingDMD(s, rs, schema, "TABLE_NAME", "SYNONYM");
+
+        // sequences
+        if (sysSequencesExists(conn)) {
+            psf = conn.prepareStatement
+                    (
+                            "SELECT SEQUENCENAME FROM SYS.SYSSEQUENCES A, SYS.SYSSCHEMAS S" +
+                                    " WHERE A.SCHEMAID = S.SCHEMAID " +
+                                    " AND S.SCHEMANAME = ?");
+            psf.setString(1, schema);
+            rs = psf.executeQuery();
+            dropUsingDMD(s, rs, schema, "SEQUENCENAME", "SEQUENCE");
+            psf.close();
+        }
+
+        // Finally drop the schema if it is not APP
+        if (!schema.equals("APP")) {
+            s.executeUpdate("DROP SCHEMA " + JdbcUtil.escape(schema) + " RESTRICT");
+        }
+        conn.commit();
+        s.close();
+    }
+
+
+    /**
+     * DROP a set of objects based upon a ResultSet from a
+     * DatabaseMetaData call.
+     * <p>
+     * TODO: Handle errors to ensure all objects are dropped,
+     * probably requires interaction with its caller.
+     *
+     * @param s        Statement object used to execute the DROP commands.
+     * @param rs       DatabaseMetaData ResultSet
+     * @param schema   Schema the objects are contained in
+     * @param mdColumn The column name used to extract the object's
+     *                 name from rs
+     * @param dropType The keyword to use after DROP in the SQL statement
+     * @throws SQLException database errors.
+     */
+    private static void dropUsingDMD(
+            Statement s, ResultSet rs, String schema,
+            String mdColumn,
+            String dropType) throws SQLException {
+        String dropLeadIn = "DROP " + dropType + " ";
+
+        // First collect the set of DROP SQL statements.
+        ArrayList<String> ddl = new ArrayList<String>();
+        while (rs.next()) {
+            String objectName = rs.getString(mdColumn);
+            String raw = dropLeadIn + JdbcUtil.escape(schema, objectName);
+            if (
+                    "TYPE".equals(dropType) ||
+                            "SEQUENCE".equals(dropType) ||
+                            "DERBY AGGREGATE".equals(dropType)
+                    ) {
+                raw = raw + " restrict ";
+            }
+            ddl.add(raw);
+        }
+        rs.close();
+        if (ddl.isEmpty())
+            return;
+
+        // Execute them as a complete batch, hoping they will all succeed.
+        s.clearBatch();
+        int batchCount = 0;
+        for (Iterator i = ddl.iterator(); i.hasNext(); ) {
+            Object sql = i.next();
+            if (sql != null) {
+                s.addBatch(sql.toString());
+                batchCount++;
+            }
+        }
+
+        int[] results;
+        boolean hadError;
+        try {
+            results = s.executeBatch();
+            Assert.assertNotNull(results);
+            Assert.assertEquals("Incorrect result length from executeBatch",
+                    batchCount, results.length);
+            hadError = false;
+        } catch (BatchUpdateException batchException) {
+            results = batchException.getUpdateCounts();
+            Assert.assertNotNull(results);
+            Assert.assertTrue("Too many results in BatchUpdateException",
+                    results.length <= batchCount);
+            hadError = true;
+        }
+
+        // Remove any statements from the list that succeeded.
+        boolean didDrop = false;
+        for (int i = 0; i < results.length; i++) {
+            int result = results[i];
+            if (result == Statement.EXECUTE_FAILED)
+                hadError = true;
+            else if (result == Statement.SUCCESS_NO_INFO || result >= 0) {
+                didDrop = true;
+                ddl.set(i, null);
+            } else
+                Assert.fail("Negative executeBatch status");
+        }
+        s.clearBatch();
+        if (didDrop) {
+            // Commit any work we did do.
+            s.getConnection().commit();
+        }
+
+        // If we had failures drop them as individual statements
+        // until there are none left or none succeed. We need to
+        // do this because the batch processing stops at the first
+        // error. This copes with the simple case where there
+        // are objects of the same type that depend on each other
+        // and a different drop order will allow all or most
+        // to be dropped.
+        if (hadError) {
+            do {
+                hadError = false;
+                didDrop = false;
+                for (ListIterator<String> i = ddl.listIterator(); i.hasNext(); ) {
+                    String sql = i.next();
+                    if (sql != null) {
+                        try {
+                            s.executeUpdate(sql);
+                            i.set(null);
+                            didDrop = true;
+                        } catch (SQLException e) {
+                            hadError = true;
+                        }
+                    }
+                }
+                if (didDrop)
+                    s.getConnection().commit();
+            } while (hadError && didDrop);
+        }
+    }
+
+    /**
+     * Return true if the SYSSEQUENCES table exists.
+     */
+    private static boolean sysSequencesExists(Connection conn) throws SQLException {
+        PreparedStatement ps = null;
+        ResultSet rs = null;
+        try {
+            ps = conn.prepareStatement
+                    (
+                            "select count(*) from sys.systables t, sys.sysschemas s\n" +
+                                    "where t.schemaid = s.schemaid\n" +
+                                    "and ( cast(s.schemaname as varchar(128)))= 'SYS'\n" +
+                                    "and ( cast(t.tablename as varchar(128))) = 'SYSSEQUENCES'");
+            rs = ps.executeQuery();
+            rs.next();
+            return (rs.getInt(1) > 0);
+        } finally {
+            if (rs != null) {
+                rs.close();
+            }
+            if (ps != null) {
+                ps.close();
+            }
+        }
+    }
+}
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/TestBase.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/TestBase.java
new file mode 100644
index 0000000..80b2b9b
--- /dev/null
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/common/TestBase.java
@@ -0,0 +1,78 @@
+package org.apache.airavata.registry.core.repositories.common;
+
+import org.junit.After;
+import org.junit.Before;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Objects;
+
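+/**
+ * Base class for repository tests. Subclasses declare the catalog databases
+ * they need through the constructor; a fresh Derby database is created before
+ * each test and destroyed afterwards. A typical subclass:
+ *
+ * <pre>
+ * public class GatewayGroupsRepositoryTest extends TestBase {
+ *     public GatewayGroupsRepositoryTest() {
+ *         super(Database.APP_CATALOG);
+ *     }
+ * }
+ * </pre>
+ */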
+public class TestBase {
+
+    private static final Logger logger = LoggerFactory.getLogger(TestBase.class);
+
+    public enum Database {APP_CATALOG, EXP_CATALOG, REPLICA_CATALOG}
+
+    private DerbyDBManager dbManager = new DerbyDBManager();
+    private Database[] databases;
+
+    public TestBase(Database... databases) {
+        if (databases == null || databases.length == 0) {
+            throw new IllegalArgumentException("At least one database must be specified");
+        }
+        this.databases = databases;
+    }
+
+    @Before
+    public void setUp() throws Exception {
+        try {
+            dbManager.startDatabaseServer();
+
+            for (Database database: databases) {
+                logger.info("Creating database " + database.name());
+                dbManager.destroyDatabase(getDatabaseName(database));
+                dbManager.initializeDatabase(getDatabaseName(database), getDatabasePath(database));
+            }
+        } catch (Exception e) {
+            logger.error("Failed to create the databases", e);
+            throw e;
+        }
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        for (Database database: databases) {
+            logger.info("Tearing down database " + database.name());
+            dbManager.destroyDatabase(getDatabaseName(database));
+        }
+        dbManager.stopDatabaseServer();
+    }
+
+    private String getDatabasePath(Database database) {
+        switch (database) {
+            case APP_CATALOG:
+                return Objects.requireNonNull(getClass().getClassLoader().getResource("appcatalog-derby.sql")).getPath();
+            case EXP_CATALOG:
+                return Objects.requireNonNull(getClass().getClassLoader().getResource("expcatalog-derby.sql")).getPath();
+            case REPLICA_CATALOG:
+                return Objects.requireNonNull(getClass().getClassLoader().getResource("replicacatalog-derby.sql")).getPath();
+            default:
+                throw new IllegalArgumentException("Unknown database: " + database);
+        }
+    }
+
+    private String getDatabaseName(Database database) {
+        switch (database) {
+            case APP_CATALOG:
+                return "app_catalog";
+            case EXP_CATALOG:
+                return "experiment_catalog";
+            case REPLICA_CATALOG:
+                return "replica_catalog";
+            default:
+                throw new IllegalArgumentException("Unknown database: " + database);
+        }
+    }
+
+}
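
As a usage sketch, a repository test now only names the catalogs it needs and inherits the Derby lifecycle from TestBase (the test class below and the no-argument repository constructor are assumptions for illustration, not part of this commit):

    package org.apache.airavata.registry.core.repositories.appcatalog;

    import org.apache.airavata.registry.core.repositories.common.TestBase;
    import org.junit.Test;

    import static org.junit.Assert.assertNotNull;

    public class ExampleRepositoryTest extends TestBase {

        public ExampleRepositoryTest() {
            // Provision a fresh app_catalog Derby instance for each test method.
            super(Database.APP_CATALOG);
        }

        @Test
        public void schemaIsProvisioned() {
            // appcatalog-derby.sql has already been applied by TestBase.setUp(),
            // so repositories run against a clean catalog here.
            assertNotNull(new ComputeResourceRepository());
        }
    }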
diff --git a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentRepositoryTest.java b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentRepositoryTest.java
index 1f9844a..4653d44 100644
--- a/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentRepositoryTest.java
+++ b/modules/registry-refactoring/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentRepositoryTest.java
@@ -27,6 +27,7 @@ import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel
 import org.apache.airavata.model.status.ExperimentState;
 import org.apache.airavata.model.workspace.Gateway;
 import org.apache.airavata.model.workspace.Project;
+import org.apache.airavata.registry.core.repositories.common.TestBase;
 import org.apache.airavata.registry.core.repositories.expcatalog.util.Initialize;
 import org.apache.airavata.registry.core.utils.DBConstants;
 import org.apache.airavata.registry.cpi.RegistryException;
@@ -40,31 +41,19 @@ import java.util.List;
 
 import static org.junit.Assert.*;
 
-public class ExperimentRepositoryTest {
+public class ExperimentRepositoryTest extends TestBase {
+
+    private static final Logger logger = LoggerFactory.getLogger(ExperimentRepositoryTest.class);
 
-    private static Initialize initialize;
     GatewayRepository gatewayRepository;
     ProjectRepository projectRepository;
     ExperimentRepository experimentRepository;
-    private static final Logger logger = LoggerFactory.getLogger(ExperimentRepositoryTest.class);
-
-    @Before
-    public void setUp() {
-        try {
-            initialize = new Initialize("expcatalog-derby.sql");
-            initialize.initializeDB();
-            gatewayRepository = new GatewayRepository();
-            projectRepository = new ProjectRepository();
-            experimentRepository = new ExperimentRepository();
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
-    }
 
-    @After
-    public void tearDown() throws Exception {
-        System.out.println("********** TEAR DOWN ************");
-        initialize.stopDerbyServer();
+    public ExperimentRepositoryTest() {
+        super(Database.EXP_CATALOG);
+        gatewayRepository = new GatewayRepository();
+        projectRepository = new ProjectRepository();
+        experimentRepository = new ExperimentRepository();
     }
 
     @Test
diff --git a/modules/registry-refactoring/src/main/resources/appcatalog-mysql.sql b/modules/registry-refactoring/src/test/resources/appcatalog-derby.sql
similarity index 83%
copy from modules/registry-refactoring/src/main/resources/appcatalog-mysql.sql
copy to modules/registry-refactoring/src/test/resources/appcatalog-derby.sql
index 39a0069..d37dd61 100644
--- a/modules/registry-refactoring/src/main/resources/appcatalog-mysql.sql
+++ b/modules/registry-refactoring/src/test/resources/appcatalog-derby.sql
@@ -18,23 +18,24 @@
  * under the License.
  *
  */
+
 CREATE TABLE COMPUTE_RESOURCE
 (
-          RESOURCE_ID VARCHAR (255) NOT NULL,
-          HOST_NAME VARCHAR (255) NOT NULL,
-          RESOURCE_DESCRIPTION VARCHAR (255),
-          CREATION_TIME TIMESTAMP DEFAULT NOW(),
-          UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
-          MAX_MEMORY_NODE INTEGER,
-          CPUS_PER_NODE INTEGER,
-          DEFAULT_NODE_COUNT INTEGER,
-          DEFAULT_CPU_COUNT INTEGER,
-          DEFAULT_WALLTIME INTEGER,
-          ENABLED SMALLINT,
-          GATEWAY_USAGE_REPORTING TINYINT(1),
-          GATEWAY_USAGE_MODULE_LOAD_CMD VARCHAR(500),
-          GATEWAY_USAGE_EXECUTABLE VARCHAR(255),
-          PRIMARY KEY (RESOURCE_ID)
+        RESOURCE_ID VARCHAR (255) NOT NULL,
+        HOST_NAME VARCHAR (255) NOT NULL,
+        RESOURCE_DESCRIPTION VARCHAR (255),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+        MAX_MEMORY_NODE INTEGER,
+        CPUS_PER_NODE INTEGER,
+        DEFAULT_NODE_COUNT INTEGER,
+        DEFAULT_CPU_COUNT INTEGER,
+        DEFAULT_WALLTIME INTEGER,
+        ENABLED SMALLINT,
+        GATEWAY_USAGE_REPORTING SMALLINT,
+        GATEWAY_USAGE_MODULE_LOAD_CMD VARCHAR(500),
+        GATEWAY_USAGE_EXECUTABLE VARCHAR(255),
+        PRIMARY KEY (RESOURCE_ID)
 );
 
 CREATE TABLE HOST_ALIAS
@@ -71,21 +72,7 @@ CREATE TABLE GSISSH_EXPORT
          FOREIGN KEY (SUBMISSION_ID) REFERENCES GSISSH_SUBMISSION(SUBMISSION_ID) ON DELETE CASCADE
 );
 
-CREATE TABLE GSISSH_PREJOBCOMMAND
-(
-         SUBMISSION_ID VARCHAR(255),
-         COMMAND VARCHAR(255),
-         PRIMARY KEY(SUBMISSION_ID, COMMAND),
-         FOREIGN KEY (SUBMISSION_ID) REFERENCES GSISSH_SUBMISSION(SUBMISSION_ID) ON DELETE CASCADE
-);
 
-CREATE TABLE GSISSH_POSTJOBCOMMAND
-(
-         SUBMISSION_ID VARCHAR(255),
-         COMMAND VARCHAR(255),
-         PRIMARY KEY(SUBMISSION_ID, COMMAND),
-         FOREIGN KEY (SUBMISSION_ID) REFERENCES GSISSH_SUBMISSION(SUBMISSION_ID) ON DELETE CASCADE
-);
 
 CREATE TABLE GLOBUS_SUBMISSION
 (
@@ -94,6 +81,7 @@ CREATE TABLE GLOBUS_SUBMISSION
          SECURITY_PROTOCAL VARCHAR(255),
          PRIMARY KEY(SUBMISSION_ID)
 );
+
 CREATE TABLE UNICORE_SUBMISSION
 (
          SUBMISSION_ID VARCHAR(255),
@@ -105,11 +93,12 @@ CREATE TABLE UNICORE_SUBMISSION
 CREATE TABLE UNICORE_DATAMOVEMENT
 (
          DATAMOVEMENT_ID VARCHAR(255),
-         SECURITY_PROTOCAL VARCHAR(255),
+         SECURITY_PROTOCOL VARCHAR(255),
          UNICORE_ENDPOINT_URL VARCHAR(255),
          PRIMARY KEY(DATAMOVEMENT_ID)
 );
 
+
 CREATE TABLE GLOBUS_GK_ENDPOINT
 (
          SUBMISSION_ID VARCHAR(255),
@@ -124,11 +113,13 @@ CREATE TABLE RESOURCE_JOB_MANAGER
         PUSH_MONITORING_ENDPOINT VARCHAR (255),
         JOB_MANAGER_BIN_PATH VARCHAR (255),
         RESOURCE_JOB_MANAGER_TYPE VARCHAR (255) NOT NULL,
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (RESOURCE_JOB_MANAGER_ID)
 );
 
+
+
 CREATE TABLE SSH_JOB_SUBMISSION
 (
         RESOURCE_JOB_MANAGER_ID VARCHAR (255) NOT NULL,
@@ -137,13 +128,12 @@ CREATE TABLE SSH_JOB_SUBMISSION
         SECURITY_PROTOCOL VARCHAR (255) NOT NULL,
         SSH_PORT INTEGER,
         MONITOR_MODE VARCHAR (255),
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
-        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00' ,
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (JOB_SUBMISSION_INTERFACE_ID),
         FOREIGN KEY (RESOURCE_JOB_MANAGER_ID) REFERENCES RESOURCE_JOB_MANAGER(RESOURCE_JOB_MANAGER_ID)
 );
 
-
 CREATE TABLE SCP_DATA_MOVEMENT
 (
         QUEUE_DESCRIPTION VARCHAR (255),
@@ -151,8 +141,8 @@ CREATE TABLE SCP_DATA_MOVEMENT
         SECURITY_PROTOCOL VARCHAR (255) NOT NULL,
         ALTERNATIVE_SCP_HOSTNAME VARCHAR (255),
         SSH_PORT INTEGER,
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
-        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00' ,
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (DATA_MOVEMENT_INTERFACE_ID)
 );
 
@@ -160,8 +150,8 @@ CREATE TABLE GRIDFTP_DATA_MOVEMENT
 (
         DATA_MOVEMENT_INTERFACE_ID VARCHAR (255) NOT NULL,
         SECURITY_PROTOCOL VARCHAR (255) NOT NULL,
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
-        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00' ,
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (DATA_MOVEMENT_INTERFACE_ID)
 );
 
@@ -170,8 +160,8 @@ CREATE TABLE GRIDFTP_ENDPOINT
         ENDPOINT VARCHAR (255) NOT NULL,
         DATA_MOVEMENT_INTERFACE_ID VARCHAR (255) NOT NULL,
         PRIMARY KEY (DATA_MOVEMENT_INTERFACE_ID,ENDPOINT),
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
-        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00' ,
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         FOREIGN KEY (DATA_MOVEMENT_INTERFACE_ID) REFERENCES GRIDFTP_DATA_MOVEMENT(DATA_MOVEMENT_INTERFACE_ID) ON DELETE CASCADE
 );
 
@@ -180,6 +170,8 @@ CREATE TABLE GRIDFTP_ENDPOINT
 --         RESOURCE_ID VARCHAR(255),
 --         SUBMISSION_ID VARCHAR(255),
 --         JOB_TYPE VARCHAR(255),
+--         CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+--         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
 --         PRIMARY KEY(RESOURCE_ID,SUBMISSION_ID,JOB_TYPE),
 --         FOREIGN KEY (RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE
 --);
@@ -189,6 +181,8 @@ CREATE TABLE GRIDFTP_ENDPOINT
 --         RESOURCE_ID VARCHAR(255),
 --         DATA_MOVE_ID VARCHAR(255),
 --         DATA_MOVE_TYPE VARCHAR(255),
+--         CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+--         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
 --         PRIMARY KEY(RESOURCE_ID,DATA_MOVE_ID,DATA_MOVE_TYPE),
 --         FOREIGN KEY (RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE
 --);
@@ -200,8 +194,8 @@ CREATE TABLE APPLICATION_MODULE
          MODULE_VERSION VARCHAR(255),
          MODULE_DESC VARCHAR(500),
          GATEWAY_ID VARCHAR (255),
-	       CREATION_TIME TIMESTAMP DEFAULT NOW(),
-         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00' ,
+         CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
          PRIMARY KEY(MODULE_ID)
 );
 
@@ -213,20 +207,47 @@ CREATE TABLE APPLICATION_DEPLOYMENT
          EXECUTABLE_PATH VARCHAR(255),
 	       PARALLELISM VARCHAR(255),
          APPLICATION_DESC VARCHAR(255),
-         ENV_MODULE_LOAD_CMD VARCHAR(255),
-	       CREATION_TIME TIMESTAMP DEFAULT NOW(),
-         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00' ,
+         CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
          GATEWAY_ID VARCHAR(255),
          DEFAULT_QUEUE_NAME VARCHAR(255),
          DEFAULT_NODE_COUNT INTEGER,
          DEFAULT_CPU_COUNT INTEGER,
          DEFAULT_WALLTIME INTEGER,
-         EDITABLE_BY_USER TINYINT(1),
+         EDITABLE_BY_USER BOOLEAN,
+         ENV_MODULE_LOAD_CMD VARCHAR(255),
          PRIMARY KEY(DEPLOYMENT_ID),
          FOREIGN KEY (COMPUTE_HOSTID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE,
          FOREIGN KEY (APP_MODULE_ID) REFERENCES APPLICATION_MODULE(MODULE_ID) ON DELETE CASCADE
 );
 
+CREATE TABLE MODULE_LOAD_CMD
+(
+        CMD VARCHAR (255) NOT NULL,
+        APP_DEPLOYMENT_ID VARCHAR (255) NOT NULL,
+        COMMAND_ORDER INTEGER,
+        PRIMARY KEY (APP_DEPLOYMENT_ID,CMD),
+        FOREIGN KEY (APP_DEPLOYMENT_ID) REFERENCES APPLICATION_DEPLOYMENT(DEPLOYMENT_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE PREJOB_COMMAND
+(
+         APPDEPLOYMENT_ID VARCHAR(255),
+         COMMAND VARCHAR(255),
+         COMMAND_ORDER INTEGER,
+         PRIMARY KEY(APPDEPLOYMENT_ID, COMMAND),
+         FOREIGN KEY (APPDEPLOYMENT_ID) REFERENCES APPLICATION_DEPLOYMENT(DEPLOYMENT_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE POSTJOB_COMMAND
+(
+         APPDEPLOYMENT_ID VARCHAR(255),
+         COMMAND VARCHAR(255),
+         COMMAND_ORDER INTEGER,
+         PRIMARY KEY(APPDEPLOYMENT_ID, COMMAND),
+         FOREIGN KEY (APPDEPLOYMENT_ID) REFERENCES APPLICATION_DEPLOYMENT(DEPLOYMENT_ID) ON DELETE CASCADE
+);
+
 CREATE TABLE LIBRARY_PREPAND_PATH
 (
          DEPLOYMENT_ID VARCHAR(255),
@@ -250,26 +271,11 @@ CREATE TABLE APP_ENVIRONMENT
          DEPLOYMENT_ID VARCHAR(255),
          NAME VARCHAR(255),
          VALUE VARCHAR(255),
+         ENV_ORDER INTEGER,
          PRIMARY KEY(DEPLOYMENT_ID, NAME),
          FOREIGN KEY (DEPLOYMENT_ID) REFERENCES APPLICATION_DEPLOYMENT(DEPLOYMENT_ID) ON DELETE CASCADE
 );
 
-CREATE TABLE PREJOB_COMMAND
-(
-         APPDEPLOYMENT_ID VARCHAR(255),
-         COMMAND VARCHAR(255),
-         PRIMARY KEY(APPDEPLOYMENT_ID, COMMAND),
-         FOREIGN KEY (APPDEPLOYMENT_ID) REFERENCES APPLICATION_DEPLOYMENT(DEPLOYMENT_ID) ON DELETE CASCADE
-);
-
-CREATE TABLE POSTJOB_COMMAND
-(
-         APPDEPLOYMENT_ID VARCHAR(255),
-         COMMAND VARCHAR(255),
-         PRIMARY KEY(APPDEPLOYMENT_ID, COMMAND),
-         FOREIGN KEY (APPDEPLOYMENT_ID) REFERENCES APPLICATION_DEPLOYMENT(DEPLOYMENT_ID) ON DELETE CASCADE
-);
-
 CREATE TABLE APPLICATION_INTERFACE
 (
          INTERFACE_ID VARCHAR(255),
@@ -277,9 +283,9 @@ CREATE TABLE APPLICATION_INTERFACE
          APPLICATION_DESCRIPTION VARCHAR(500),
          GATEWAY_ID VARCHAR(255),
          ARCHIVE_WORKING_DIRECTORY SMALLINT,
-         HAS_OPTIONAL_FILE_INPUTS TINYINT(1),
-	       CREATION_TIME TIMESTAMP DEFAULT NOW(),
+         CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
          UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+         HAS_OPTIONAL_FILE_INPUTS SMALLINT,
          PRIMARY KEY(INTERFACE_ID)
 );
 
@@ -301,12 +307,12 @@ CREATE TABLE APPLICATION_INPUT
          METADATA VARCHAR(255),
          APP_ARGUMENT VARCHAR(255),
          STANDARD_INPUT SMALLINT,
-         USER_FRIENDLY_DESC VARCHAR(255),
          INPUT_ORDER INTEGER,
          IS_REQUIRED SMALLINT,
          REQUIRED_TO_COMMANDLINE SMALLINT,
          DATA_STAGED SMALLINT,
-         IS_READ_ONLY SMALLINT,
+         USER_FRIENDLY_DESC VARCHAR(255),
+         IS_READ_ONLY SMALLINT,
          PRIMARY KEY(INTERFACE_ID,INPUT_KEY),
          FOREIGN KEY (INTERFACE_ID) REFERENCES APPLICATION_INTERFACE(INTERFACE_ID) ON DELETE CASCADE
 );
@@ -331,8 +337,8 @@ CREATE TABLE APPLICATION_OUTPUT
 CREATE TABLE GATEWAY_PROFILE
 (
          GATEWAY_ID VARCHAR(255),
-	       CREATION_TIME TIMESTAMP DEFAULT NOW(),
-         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00' ,
+         CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
          CS_TOKEN VARCHAR (255),
          IDENTITY_SERVER_TENANT VARCHAR (255),
          IDENTITY_SERVER_PWD_CRED_TOKEN VARCHAR (255),
@@ -361,7 +367,7 @@ CREATE TABLE COMPUTE_RESOURCE_PREFERENCE
         PRIMARY KEY(GATEWAY_ID,RESOURCE_ID),
         FOREIGN KEY (RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE,
         FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY_PROFILE(GATEWAY_ID) ON DELETE CASCADE
-) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE SSH_ACCOUNT_PROVISIONER_CONFIG
 (
@@ -371,7 +377,7 @@ CREATE TABLE SSH_ACCOUNT_PROVISIONER_CONFIG
         CONFIG_VALUE VARCHAR(255),
         PRIMARY KEY (GATEWAY_ID, RESOURCE_ID, CONFIG_NAME),
         FOREIGN KEY (GATEWAY_ID, RESOURCE_ID) REFERENCES COMPUTE_RESOURCE_PREFERENCE (GATEWAY_ID, RESOURCE_ID) ON DELETE CASCADE
-) ENGINE=MyISAM DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE BATCH_QUEUE
 (
@@ -388,7 +394,7 @@ CREATE TABLE BATCH_QUEUE
         DEFAULT_CPU_COUNT INTEGER,
         DEFAULT_WALLTIME INTEGER,
         QUEUE_SPECIFIC_MACROS VARCHAR(255),
-        IS_DEFAULT_QUEUE TINYINT(1),
+        IS_DEFAULT_QUEUE BOOLEAN,
         PRIMARY KEY (COMPUTE_RESOURCE_ID,QUEUE_NAME),
         FOREIGN KEY (COMPUTE_RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE
 );
@@ -400,7 +406,7 @@ CREATE TABLE COMPUTE_RESOURCE_FILE_SYSTEM
         FILE_SYSTEM VARCHAR (255) NOT NULL,
         PRIMARY KEY (COMPUTE_RESOURCE_ID,FILE_SYSTEM),
         FOREIGN KEY (COMPUTE_RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE
-);
+);
 
 CREATE TABLE JOB_SUBMISSION_INTERFACE
 (
@@ -408,7 +414,7 @@ CREATE TABLE JOB_SUBMISSION_INTERFACE
         COMPUTE_RESOURCE_ID VARCHAR (255) NOT NULL,
         JOB_SUBMISSION_PROTOCOL VARCHAR (255) NOT NULL,
         PRIORITY_ORDER INTEGER,
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (COMPUTE_RESOURCE_ID,JOB_SUBMISSION_INTERFACE_ID),
         FOREIGN KEY (COMPUTE_RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE
@@ -420,7 +426,7 @@ CREATE TABLE DATA_MOVEMENT_INTERFACE
         DATA_MOVEMENT_PROTOCOL VARCHAR (255) NOT NULL,
         DATA_MOVEMENT_INTERFACE_ID VARCHAR (255) NOT NULL,
         PRIORITY_ORDER INTEGER,
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (COMPUTE_RESOURCE_ID,DATA_MOVEMENT_INTERFACE_ID),
         FOREIGN KEY (COMPUTE_RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE
@@ -431,8 +437,8 @@ CREATE TABLE STORAGE_RESOURCE
         STORAGE_RESOURCE_ID VARCHAR (255) NOT NULL,
         HOST_NAME VARCHAR (255) NOT NULL,
         DESCRIPTION VARCHAR (255),
-        ENABLED SMALLINT,
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
+        ENABLED SMALLINT,
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (STORAGE_RESOURCE_ID)
 );
@@ -440,10 +446,10 @@ CREATE TABLE STORAGE_RESOURCE
 CREATE TABLE STORAGE_INTERFACE
 (
         STORAGE_RESOURCE_ID VARCHAR (255) NOT NULL,
-        DATA_MOVEMENT_PROTOCOL VARCHAR (255) NOT NULL,
         DATA_MOVEMENT_INTERFACE_ID VARCHAR (255) NOT NULL,
+        DATA_MOVEMENT_PROTOCOL VARCHAR (255) NOT NULL,
         PRIORITY_ORDER INTEGER,
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (STORAGE_RESOURCE_ID,DATA_MOVEMENT_INTERFACE_ID),
         FOREIGN KEY (STORAGE_RESOURCE_ID) REFERENCES STORAGE_RESOURCE(STORAGE_RESOURCE_ID) ON DELETE CASCADE
@@ -472,19 +478,13 @@ CREATE TABLE LOCAL_SUBMISSION
         RESOURCE_JOB_MANAGER_ID VARCHAR (255) NOT NULL,
         JOB_SUBMISSION_INTERFACE_ID VARCHAR (255) NOT NULL,
         SECURITY_PROTOCOL VARCHAR (255) NOT NULL,
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         PRIMARY KEY (JOB_SUBMISSION_INTERFACE_ID),
         FOREIGN KEY (RESOURCE_JOB_MANAGER_ID) REFERENCES RESOURCE_JOB_MANAGER(RESOURCE_JOB_MANAGER_ID)
 );
 
-CREATE TABLE LOCAL_DATA_MOVEMENT
-(
-        DATA_MOVEMENT_INTERFACE_ID VARCHAR (255) NOT NULL,
-        PRIMARY KEY (DATA_MOVEMENT_INTERFACE_ID)
-);
-
-CREATE TABLE DATA_STORAGE_PREFERENCE
+CREATE TABLE STORAGE_PREFERENCE
 (
         GATEWAY_ID VARCHAR(255),
         STORAGE_RESOURCE_ID VARCHAR(255),
@@ -495,13 +495,20 @@ CREATE TABLE DATA_STORAGE_PREFERENCE
         FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY_PROFILE(GATEWAY_ID) ON DELETE CASCADE
 );
 
+CREATE TABLE LOCAL_DATA_MOVEMENT
+(
+        DATA_MOVEMENT_INTERFACE_ID VARCHAR (255) NOT NULL,
+        PRIMARY KEY (DATA_MOVEMENT_INTERFACE_ID)
+);
+
 CREATE TABLE WORKFLOW
 (
         WF_TEMPLATE_ID VARCHAR (255) NOT NULL,
         WF_NAME VARCHAR (255) NOT NULL,
-        GRAPH LONGTEXT,
+        GRAPH CLOB,
         OWNER VARCHAR(255),
-        CREATION_TIME TIMESTAMP DEFAULT NOW(),
+        GATEWAY_ID VARCHAR(255),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
         UPDATE_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
         IMAGE BLOB,
         PRIMARY KEY (WF_TEMPLATE_ID)
@@ -511,7 +518,7 @@ CREATE TABLE WORKFLOW_INPUT
 (
          WF_TEMPLATE_ID VARCHAR(255),
          INPUT_KEY VARCHAR(255),
-         INPUT_VALUE LONGTEXT,
+         INPUT_VALUE CLOB,
          DATA_TYPE VARCHAR(255),
          METADATA VARCHAR(255),
          APP_ARGUMENT VARCHAR(255),
@@ -525,7 +532,7 @@ CREATE TABLE WORKFLOW_OUTPUT
 (
          WF_TEMPLATE_ID VARCHAR(255),
          OUTPUT_KEY VARCHAR(255),
-         OUTPUT_VALUE LONGTEXT,
+         OUTPUT_VALUE CLOB,
          DATA_TYPE VARCHAR(255),
          PRIMARY KEY(WF_TEMPLATE_ID,OUTPUT_KEY),
          FOREIGN KEY (WF_TEMPLATE_ID) REFERENCES WORKFLOW(WF_TEMPLATE_ID) ON DELETE CASCADE
@@ -533,14 +540,14 @@ CREATE TABLE WORKFLOW_OUTPUT
 
 CREATE TABLE USER_RESOURCE_PROFILE (
   USER_ID varchar(255) NOT NULL,
-  CREATION_TIME datetime DEFAULT NULL,
+  CREATION_TIME TIMESTAMP DEFAULT NULL,
   CS_TOKEN varchar(255) DEFAULT NULL,
   GATEWAY_ID varchar(255) DEFAULT NULL,
   IDENTITY_SERVER_PWD_CRED_TOKEN varchar(255) DEFAULT NULL,
   IDENTITY_SERVER_TENANT varchar(255) DEFAULT NULL,
-  UPDATE_TIME datetime DEFAULT NULL,
+  UPDATE_TIME TIMESTAMP DEFAULT NULL,
   PRIMARY KEY (USER_ID,GATEWAY_ID)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE USER_STORAGE_PREFERENCE (
   STORAGE_RESOURCE_ID varchar(255) NOT NULL,
@@ -550,7 +557,7 @@ CREATE TABLE USER_STORAGE_PREFERENCE (
   GATEWAY_ID varchar(255) DEFAULT NULL,
   LOGIN_USERNAME varchar(255) DEFAULT NULL,
   PRIMARY KEY (STORAGE_RESOURCE_ID,USER_ID,GATEWAY_ID)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE USER_COMPUTE_RESOURCE_PREFERENCE (
   RESOURCE_ID varchar(255) NOT NULL,
@@ -562,12 +569,12 @@ CREATE TABLE USER_COMPUTE_RESOURCE_PREFERENCE (
   ALLOCATION_PROJECT_NUMBER varchar(255) DEFAULT NULL,
   QUALITY_OF_SERVICE varchar(255) DEFAULT NULL,
   RESERVATION varchar(255) DEFAULT NULL,
-  RESERVATION_END_TIME datetime DEFAULT NULL,
-  RESERVATION_START_TIME datetime DEFAULT NULL,
-  SCRATCH_LOCATION varchar(255) NOT NULL DEFAULT NULL,
-  VALIDATED TINYINT(1) DEFAULT 0,
+  RESERVATION_END_TIME TIMESTAMP DEFAULT NULL,
+  RESERVATION_START_TIME TIMESTAMP DEFAULT NULL,
+  SCRATCH_LOCATION varchar(255) DEFAULT NULL,
+  VALIDATED SMALLINT NOT NULL DEFAULT 0,
   PRIMARY KEY (RESOURCE_ID,USER_ID,GATEWAY_ID)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE GROUP_RESOURCE_PROFILE (
   GATEWAY_ID varchar(255) NOT NULL,
@@ -577,7 +584,7 @@ CREATE TABLE GROUP_RESOURCE_PROFILE (
   UPDATE_TIME BIGINT NOT NULL,
   PRIMARY KEY (GROUP_RESOURCE_PROFILE_ID),
   UNIQUE (GATEWAY_ID, GROUP_RESOURCE_PROFILE_NAME)
-)ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE BATCH_QUEUE_RESOURCE_POLICY (
   RESOURCE_POLICY_ID varchar(255) NOT NULL,
@@ -590,7 +597,7 @@ CREATE TABLE BATCH_QUEUE_RESOURCE_POLICY (
   PRIMARY KEY (RESOURCE_POLICY_ID),
   FOREIGN KEY (COMPUTE_RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE,
   FOREIGN KEY (GROUP_RESOURCE_PROFILE_ID) REFERENCES GROUP_RESOURCE_PROFILE(GROUP_RESOURCE_PROFILE_ID) ON DELETE CASCADE
-)ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE COMPUTE_RESOURCE_POLICY (
     RESOURCE_POLICY_ID varchar(255) NOT NULL,
@@ -599,7 +606,7 @@ CREATE TABLE COMPUTE_RESOURCE_POLICY (
     PRIMARY KEY (RESOURCE_POLICY_ID),
     FOREIGN KEY (COMPUTE_RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE,
     FOREIGN KEY (GROUP_RESOURCE_PROFILE_ID) REFERENCES GROUP_RESOURCE_PROFILE(GROUP_RESOURCE_PROFILE_ID) ON DELETE CASCADE
-)ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE COMPUTE_RESOURCE_POLICY_QUEUES (
     RESOURCE_POLICY_ID varchar(255) NOT NULL,
@@ -608,7 +615,7 @@ CREATE TABLE COMPUTE_RESOURCE_POLICY_QUEUES (
     PRIMARY KEY (RESOURCE_POLICY_ID, QUEUE_NAME),
     FOREIGN KEY (RESOURCE_POLICY_ID) REFERENCES COMPUTE_RESOURCE_POLICY(RESOURCE_POLICY_ID) ON DELETE CASCADE,
     FOREIGN KEY (COMPUTE_RESOURCE_ID,QUEUE_NAME) REFERENCES BATCH_QUEUE(COMPUTE_RESOURCE_ID,QUEUE_NAME) ON DELETE CASCADE
-)ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE GROUP_COMPUTE_RESOURCE_PREFERENCE
 (
@@ -632,7 +639,7 @@ CREATE TABLE GROUP_COMPUTE_RESOURCE_PREFERENCE
         PRIMARY KEY(RESOURCE_ID,GROUP_RESOURCE_PROFILE_ID),
         FOREIGN KEY (RESOURCE_ID) REFERENCES COMPUTE_RESOURCE(RESOURCE_ID) ON DELETE CASCADE,
         FOREIGN KEY (GROUP_RESOURCE_PROFILE_ID) REFERENCES GROUP_RESOURCE_PROFILE(GROUP_RESOURCE_PROFILE_ID)
-)ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE GRP_SSH_ACC_PROV_CONFIG
 (
@@ -642,14 +649,35 @@ CREATE TABLE GRP_SSH_ACC_PROV_CONFIG
         CONFIG_VALUE VARCHAR(255),
         PRIMARY KEY (RESOURCE_ID, CONFIG_NAME, GROUP_RESOURCE_PROFILE_ID),
         FOREIGN KEY (RESOURCE_ID,GROUP_RESOURCE_PROFILE_ID) REFERENCES GROUP_COMPUTE_RESOURCE_PREFERENCE (RESOURCE_ID,GROUP_RESOURCE_PROFILE_ID) ON DELETE CASCADE
-)ENGINE=InnoDB DEFAULT CHARSET=latin1;
+);
 
 CREATE TABLE CONFIGURATION
 (
-          CONFIG_KEY VARCHAR(255),
-          CONFIG_VAL VARCHAR(255),
-          PRIMARY KEY(CONFIG_KEY, CONFIG_VAL)
+        CONFIG_KEY VARCHAR(255),
+        CONFIG_VAL VARCHAR(255),
+        PRIMARY KEY(CONFIG_KEY, CONFIG_VAL)
+);
+
+CREATE TABLE GATEWAY_GROUPS
+(
+        GATEWAY_ID VARCHAR(255),
+        ADMINS_GROUP_ID VARCHAR(255),
+        READ_ONLY_ADMINS_GROUP_ID VARCHAR(255),
+        DEFAULT_GATEWAY_USERS_GROUP_ID VARCHAR(255),
+        PRIMARY KEY(GATEWAY_ID)
+);
+
+CREATE TABLE CLOUD_JOB_SUBMISSION (
+  JOB_SUBMISSION_INTERFACE_ID varchar(255) NOT NULL,
+  EXECUTABLE_TYPE VARCHAR(255) DEFAULT NULL,
+  NODE_ID VARCHAR(255) DEFAULT NULL,
+  PROVIDER_NAME VARCHAR(255) DEFAULT NULL,
+  SECURITY_PROTOCOL VARCHAR(255) DEFAULT NULL,
+  USER_ACCOUNT_NAME VARCHAR(255) DEFAULT NULL,
+  PRIMARY KEY (JOB_SUBMISSION_INTERFACE_ID)
 );
 
 INSERT INTO CONFIGURATION (CONFIG_KEY, CONFIG_VAL) VALUES('app_catalog_version', '0.16');
 
+
+
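
The script above is the MySQL app-catalog schema ported to Derby: NOW() becomes CURRENT_TIMESTAMP, TINYINT(1) becomes BOOLEAN or SMALLINT, LONGTEXT becomes CLOB, and the MySQL-only ENGINE/CHARSET table options are dropped. One quick way to sanity-check such a port (a standalone sketch assuming derby.jar on the classpath; not part of this patch) is to feed a translated statement to an in-memory Derby instance:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class DerbyDdlSmokeTest {
        public static void main(String[] args) throws Exception {
            Connection conn = DriverManager.getConnection("jdbc:derby:memory:ddlcheck;create=true");
            try (Statement s = conn.createStatement()) {
                // CURRENT_TIMESTAMP is the Derby spelling of MySQL's NOW(),
                // and BOOLEAN stands in for TINYINT(1).
                s.executeUpdate("CREATE TABLE SMOKE (" +
                        "ID VARCHAR(255) NOT NULL, " +
                        "CREATED TIMESTAMP DEFAULT CURRENT_TIMESTAMP, " +
                        "IS_DEFAULT BOOLEAN, " +
                        "PRIMARY KEY (ID))");
                System.out.println("Translated DDL accepted by Derby");
            } finally {
                conn.close();
            }
        }
    }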
diff --git a/modules/registry-refactoring/src/test/resources/expcatalog-derby.sql b/modules/registry-refactoring/src/test/resources/expcatalog-derby.sql
new file mode 100644
index 0000000..06efd1a
--- /dev/null
+++ b/modules/registry-refactoring/src/test/resources/expcatalog-derby.sql
@@ -0,0 +1,398 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+CREATE TABLE GATEWAY
+(
+        GATEWAY_ID VARCHAR(255),
+        GATEWAY_NAME VARCHAR(255),
+        DOMAIN VARCHAR(255),
+        EMAIL_ADDRESS VARCHAR(255),
+        GATEWAY_ACRONYM varchar(255),
+        GATEWAY_ADMIN_EMAIL varchar(255),
+        GATEWAY_ADMIN_FIRST_NAME varchar(255),
+        GATEWAY_APPROVAL_STATUS varchar(255),
+        GATEWAY_PUBLIC_ABSTRACT varchar(255),
+        GATEWAY_URL varchar(255),
+        GATEWAY_ADMIN_LAST_NAME varchar(255),
+        IDENTITY_SERVER_PASSWORD_TOKEN varchar(255),
+        IDENTITY_SERVER_USERNAME varchar(255),
+        GATEWAY_REVIEW_PROPOSAL_DESCRIPTION varchar(255),
+        DECLINED_REASON varchar(255),
+        OAUTH_CLIENT_SECRET varchar(255),
+        OAUTH_CLIENT_ID varchar(255),
+        REQUEST_CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        REQUESTER_USERNAME VARCHAR(255),
+        PRIMARY KEY (GATEWAY_ID)
+);
+
+CREATE TABLE NOTIFICATION
+(
+        NOTIFICATION_ID VARCHAR(255),
+        GATEWAY_ID VARCHAR(255),
+        TITLE VARCHAR(255),
+        PRIORITY VARCHAR(255),
+        NOTIFICATION_MESSAGE VARCHAR(4096),
+        PUBLISHED_DATE TIMESTAMP,
+        EXPIRATION_DATE TIMESTAMP,
+        CREATION_DATE TIMESTAMP,
+        PRIMARY KEY (NOTIFICATION_ID)
+);
+
+CREATE TABLE USERS
+(
+        AIRAVATA_INTERNAL_USER_ID VARCHAR(255),
+        USER_NAME VARCHAR(255),
+        PASSWORD VARCHAR(255),
+        GATEWAY_ID VARCHAR(255),
+        PRIMARY KEY (GATEWAY_ID, USER_NAME),
+        FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE GATEWAY_WORKER
+(
+        GATEWAY_ID VARCHAR(255),
+        USER_NAME VARCHAR(255),
+        PRIMARY KEY (GATEWAY_ID, USER_NAME),
+        FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE PROJECT
+(
+        GATEWAY_ID VARCHAR(255),
+        USER_NAME VARCHAR(255),
+        PROJECT_NAME VARCHAR(255),
+        PROJECT_ID VARCHAR(255),
+        DESCRIPTION VARCHAR(255),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        PRIMARY KEY (PROJECT_ID),
+        FOREIGN KEY (GATEWAY_ID) REFERENCES GATEWAY(GATEWAY_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE PROJECT_USER
+(
+        PROJECT_ID VARCHAR(255),
+        USER_NAME VARCHAR(255),
+        PRIMARY KEY (PROJECT_ID,USER_NAME),
+        FOREIGN KEY (PROJECT_ID) REFERENCES PROJECT(PROJECT_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE EXPERIMENT (
+        EXPERIMENT_ID varchar(255),
+        PROJECT_ID varchar(255),
+        GATEWAY_ID varchar(255),
+        EXPERIMENT_TYPE varchar(255),
+        USER_NAME varchar(255),
+        EXPERIMENT_NAME varchar(255),
+        CREATION_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        DESCRIPTION varchar(255),
+        EXECUTION_ID varchar(255),
+        GATEWAY_EXECUTION_ID varchar(255),
+        GATEWAY_INSTANCE_ID varchar(255),
+        ENABLE_EMAIL_NOTIFICATION SMALLINT,
+        EMAIL_ADDRESSES CLOB,
+        PRIMARY KEY (EXPERIMENT_ID),
+        FOREIGN KEY (PROJECT_ID) REFERENCES PROJECT(PROJECT_ID) ON DELETE CASCADE
+);
+
+
+CREATE TABLE EXPERIMENT_INPUT
+(
+        EXPERIMENT_ID varchar(255),
+        INPUT_NAME varchar(255),
+        INPUT_VALUE CLOB,
+        DATA_TYPE varchar(255),
+        APPLICATION_ARGUMENT varchar(255),
+        STANDARD_INPUT SMALLINT,
+        USER_FRIENDLY_DESCRIPTION varchar(255),
+        METADATA varchar(255),
+        INPUT_ORDER INT,
+        IS_REQUIRED SMALLINT,
+        REQUIRED_TO_ADDED_TO_CMD SMALLINT,
+        DATA_STAGED SMALLINT,
+        STORAGE_RESOURCE_ID varchar(255),
+        IS_READ_ONLY SMALLINT,
+        PRIMARY KEY(EXPERIMENT_ID,INPUT_NAME),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE EXPERIMENT_OUTPUT
+(
+        EXPERIMENT_ID varchar(255),
+        OUTPUT_NAME varchar(255),
+        OUTPUT_VALUE CLOB,
+        DATA_TYPE varchar(255),
+        APPLICATION_ARGUMENT varchar(255),
+        IS_REQUIRED SMALLINT,
+        REQUIRED_TO_ADDED_TO_CMD SMALLINT,
+        DATA_MOVEMENT SMALLINT,
+        LOCATION varchar(255),
+        SEARCH_QUERY varchar(255),
+        OUTPUT_STREAMING SMALLINT,
+        STORAGE_RESOURCE_ID varchar(255),
+        PRIMARY KEY(EXPERIMENT_ID,OUTPUT_NAME),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+
+CREATE TABLE EXPERIMENT_STATUS (
+        STATUS_ID varchar(255),
+        EXPERIMENT_ID varchar(255),
+        STATE varchar(255),
+        TIME_OF_STATE_CHANGE TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        REASON CLOB,
+        PRIMARY KEY (STATUS_ID, EXPERIMENT_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+
+CREATE TABLE EXPERIMENT_ERROR (
+        ERROR_ID varchar(255),
+        EXPERIMENT_ID varchar(255),
+        CREATION_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        ACTUAL_ERROR_MESSAGE CLOB,
+        USER_FRIENDLY_MESSAGE CLOB,
+        TRANSIENT_OR_PERSISTENT SMALLINT,
+        ROOT_CAUSE_ERROR_ID_LIST CLOB,
+        PRIMARY KEY (ERROR_ID, EXPERIMENT_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE USER_CONFIGURATION_DATA (
+        EXPERIMENT_ID varchar(255),
+        AIRAVATA_AUTO_SCHEDULE SMALLINT,
+        OVERRIDE_MANUAL_SCHEDULED_PARAMS SMALLINT,
+        SHARE_EXPERIMENT_PUBLICALLY SMALLINT,
+        THROTTLE_RESOURCES SMALLINT,
+        USER_DN varchar(255),
+        GENERATE_CERT SMALLINT,
+        RESOURCE_HOST_ID varchar(255),
+        TOTAL_CPU_COUNT INT,
+        NODE_COUNT INT,
+        NUMBER_OF_THREADS INT,
+        QUEUE_NAME varchar(255),
+        WALL_TIME_LIMIT INT,
+        TOTAL_PHYSICAL_MEMORY INT,
+        STATIC_WORKING_DIR varchar(255),
+        OVERRIDE_LOGIN_USER_NAME varchar(255),
+        OVERRIDE_SCRATCH_LOCATION varchar(255),
+        OVERRIDE_ALLOCATION_PROJECT_NUMBER varchar(255),
+        STORAGE_RESOURCE_ID varchar(255),
+        EXPERIMENT_DATA_DIR VARCHAR (512),
+        GROUP_RESOURCE_PROFILE_ID VARCHAR(255) DEFAULT NULL,
+        IS_USE_USER_CR_PREF BOOLEAN,
+        PRIMARY KEY (EXPERIMENT_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+CREATE VIEW LATEST_EXPERIMENT_STATUS AS
+  select ES1.EXPERIMENT_ID AS EXPERIMENT_ID, ES1.STATE AS STATE, ES1.TIME_OF_STATE_CHANGE AS TIME_OF_STATE_CHANGE
+  from EXPERIMENT_STATUS ES1 LEFT JOIN EXPERIMENT_STATUS ES2 ON (ES1.EXPERIMENT_ID = ES2.EXPERIMENT_ID
+    AND ES1.TIME_OF_STATE_CHANGE < ES2.TIME_OF_STATE_CHANGE)  WHERE ES2.TIME_OF_STATE_CHANGE is NULL;
+
+CREATE VIEW EXPERIMENT_SUMMARY AS
+  select E.EXPERIMENT_ID AS EXPERIMENT_ID, E.PROJECT_ID AS PROJECT_ID, E.GATEWAY_ID AS GATEWAY_ID,
+  E.USER_NAME AS USER_NAME, E.EXECUTION_ID AS EXECUTION_ID, E.EXPERIMENT_NAME AS EXPERIMENT_NAME,
+  E.CREATION_TIME AS CREATION_TIME, E.DESCRIPTION AS DESCRIPTION, ES.STATE AS STATE, UD.RESOURCE_HOST_ID
+  AS RESOURCE_HOST_ID, ES.TIME_OF_STATE_CHANGE AS TIME_OF_STATE_CHANGE
+    from ((EXPERIMENT E left join LATEST_EXPERIMENT_STATUS ES on((E.EXPERIMENT_ID = ES.EXPERIMENT_ID)))
+    left join USER_CONFIGURATION_DATA UD on((E.EXPERIMENT_ID = UD.EXPERIMENT_ID))) where true;
+
+CREATE TABLE PROCESS (
+        PROCESS_ID varchar(255),
+        EXPERIMENT_ID varchar(255),
+        CREATION_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        LAST_UPDATE_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        PROCESS_DETAIL CLOB,
+        APPLICATION_INTERFACE_ID varchar(255),
+        TASK_DAG CLOB,
+        APPLICATION_DEPLOYMENT_ID varchar(255),
+        COMPUTE_RESOURCE_ID varchar(255),
+        GATEWAY_EXECUTION_ID varchar(255),
+        ENABLE_EMAIL_NOTIFICATION SMALLINT,
+        EMAIL_ADDRESSES CLOB,
+        STORAGE_RESOURCE_ID varchar(255),
+        USER_DN varchar(255),
+        GENERATE_CERT SMALLINT,
+        EXPERIMENT_DATA_DIR VARCHAR (512),
+        USERNAME VARCHAR (255),
+        GROUP_RESOURCE_PROFILE_ID VARCHAR (255) DEFAULT NULL,
+        USE_USER_CR_PREF BOOLEAN,
+        PRIMARY KEY (PROCESS_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE PROCESS_INPUT
+(
+        PROCESS_ID varchar(255),
+        INPUT_NAME varchar(255),
+        INPUT_VALUE CLOB,
+        DATA_TYPE varchar(255),
+        APPLICATION_ARGUMENT varchar(255),
+        STANDARD_INPUT SMALLINT,
+        USER_FRIENDLY_DESCRIPTION varchar(255),
+        METADATA varchar(255),
+        INPUT_ORDER INT,
+        IS_REQUIRED SMALLINT,
+        REQUIRED_TO_ADDED_TO_CMD SMALLINT,
+        DATA_STAGED SMALLINT,
+        STORAGE_RESOURCE_ID varchar(255),
+        IS_READ_ONLY SMALLINT,
+        PRIMARY KEY(PROCESS_ID,INPUT_NAME),
+        FOREIGN KEY (PROCESS_ID) REFERENCES PROCESS(PROCESS_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE PROCESS_OUTPUT
+(
+        PROCESS_ID varchar(255),
+        OUTPUT_NAME varchar(255),
+        OUTPUT_VALUE CLOB,
+        DATA_TYPE varchar(255),
+        APPLICATION_ARGUMENT varchar(255),
+        IS_REQUIRED SMALLINT,
+        REQUIRED_TO_ADDED_TO_CMD SMALLINT,
+        DATA_MOVEMENT SMALLINT,
+        LOCATION varchar(255),
+        SEARCH_QUERY varchar(255),
+        OUTPUT_STREAMING SMALLINT,
+        STORAGE_RESOURCE_ID varchar(255),
+        PRIMARY KEY(PROCESS_ID,OUTPUT_NAME),
+        FOREIGN KEY (PROCESS_ID) REFERENCES PROCESS(PROCESS_ID) ON DELETE CASCADE
+);
+
+
+CREATE TABLE PROCESS_STATUS (
+        STATUS_ID varchar(255),
+        PROCESS_ID varchar(255),
+        STATE varchar(255),
+        TIME_OF_STATE_CHANGE TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        REASON CLOB,
+        PRIMARY KEY (STATUS_ID, PROCESS_ID),
+        FOREIGN KEY (PROCESS_ID) REFERENCES PROCESS(PROCESS_ID) ON DELETE CASCADE
+);
+
+
+CREATE TABLE PROCESS_ERROR (
+        ERROR_ID varchar(255),
+        PROCESS_ID varchar(255),
+        CREATION_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        ACTUAL_ERROR_MESSAGE CLOB,
+        USER_FRIENDLY_MESSAGE CLOB,
+        TRANSIENT_OR_PERSISTENT SMALLINT,
+        ROOT_CAUSE_ERROR_ID_LIST CLOB,
+        PRIMARY KEY (ERROR_ID, PROCESS_ID),
+        FOREIGN KEY (PROCESS_ID) REFERENCES PROCESS(PROCESS_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE PROCESS_RESOURCE_SCHEDULE (
+        PROCESS_ID varchar(255),
+        RESOURCE_HOST_ID varchar(255),
+        TOTAL_CPU_COUNT INT,
+        NODE_COUNT INT,
+        NUMBER_OF_THREADS INT,
+        QUEUE_NAME varchar(255),
+        WALL_TIME_LIMIT INT,
+        TOTAL_PHYSICAL_MEMORY INT,
+        STATIC_WORKING_DIR varchar(255),
+        PRIMARY KEY (PROCESS_ID),
+        FOREIGN KEY (PROCESS_ID) REFERENCES PROCESS(PROCESS_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE TASK (
+        TASK_ID varchar(255),
+        TASK_TYPE varchar(255),
+        PARENT_PROCESS_ID varchar(255),
+        CREATION_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        LAST_UPDATE_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        TASK_DETAIL CLOB,
+        TASK_INTERNAL_STORE CHAR,
+        PRIMARY KEY (TASK_ID),
+        FOREIGN KEY (PARENT_PROCESS_ID) REFERENCES PROCESS(PROCESS_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE TASK_STATUS (
+        STATUS_ID varchar(255),
+        TASK_ID varchar(255),
+        STATE varchar(255),
+        TIME_OF_STATE_CHANGE TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        REASON CLOB,
+        PRIMARY KEY (STATUS_ID, TASK_ID),
+        FOREIGN KEY (TASK_ID) REFERENCES TASK(TASK_ID) ON DELETE CASCADE
+);
+
+
+CREATE TABLE TASK_ERROR (
+        ERROR_ID varchar(255),
+        TASK_ID varchar(255),
+        CREATION_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        ACTUAL_ERROR_MESSAGE CLOB,
+        USER_FRIENDLY_MESSAGE CLOB,
+        TRANSIENT_OR_PERSISTENT SMALLINT,
+        ROOT_CAUSE_ERROR_ID_LIST CLOB,
+        PRIMARY KEY (ERROR_ID, TASK_ID),
+        FOREIGN KEY (TASK_ID) REFERENCES TASK(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE JOB (
+        JOB_ID varchar(255),
+        TASK_ID varchar(255),
+        PROCESS_ID varchar(255),
+        JOB_DESCRIPTION CLOB NOT NULL,
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        COMPUTE_RESOURCE_CONSUMED varchar(255),
+        JOB_NAME varchar(255),
+        WORKING_DIR varchar(255),
+        STD_OUT CLOB,
+        STD_ERR CLOB,
+        EXIT_CODE INT,
+        PRIMARY KEY (JOB_ID, TASK_ID),
+        FOREIGN KEY (TASK_ID) REFERENCES TASK(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE JOB_STATUS (
+        STATUS_ID varchar(255),
+        JOB_ID varchar(255),
+        TASK_ID varchar(255),
+        STATE varchar(255),
+        TIME_OF_STATE_CHANGE TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        REASON CLOB,
+        PRIMARY KEY (STATUS_ID, JOB_ID, TASK_ID),
+        FOREIGN KEY (JOB_ID, TASK_ID) REFERENCES JOB(JOB_ID, TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE QUEUE_STATUS(
+        HOST_NAME VARCHAR(255),
+        QUEUE_NAME VARCHAR(255),
+        CREATED_TIME BIGINT,
+        QUEUE_UP BOOLEAN,
+        RUNNING_JOBS INT,
+        QUEUED_JOBS INT,
+        PRIMARY KEY (HOST_NAME, QUEUE_NAME, CREATED_TIME)
+);
+
+CREATE TABLE CONFIGURATION
+(
+        CONFIG_KEY VARCHAR(255),
+        CONFIG_VAL VARCHAR(255),
+        EXPIRE_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        CATEGORY_ID VARCHAR (255),
+        PRIMARY KEY(CONFIG_KEY, CONFIG_VAL, CATEGORY_ID)
+);
+
+INSERT INTO CONFIGURATION (CONFIG_KEY, CONFIG_VAL, EXPIRE_DATE, CATEGORY_ID) VALUES('registry.version', '0.16', CURRENT_TIMESTAMP, 'SYSTEM');
\ No newline at end of file
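
The LATEST_EXPERIMENT_STATUS view above uses a standard anti-join to find the newest status: each row ES1 is left-joined to any strictly later row ES2 for the same experiment, and only rows with no later match (ES2.TIME_OF_STATE_CHANGE IS NULL) survive. A hedged JDBC sketch of querying it (the connection URL and experiment ID are illustrative; the schema is assumed to be loaded already):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;

    public class LatestStatusSketch {
        public static void main(String[] args) throws Exception {
            Connection conn = DriverManager.getConnection("jdbc:derby:memory:experiment_catalog;create=true");
            try (PreparedStatement ps = conn.prepareStatement(
                    "SELECT EXPERIMENT_ID, STATE, TIME_OF_STATE_CHANGE " +
                    "FROM LATEST_EXPERIMENT_STATUS WHERE EXPERIMENT_ID = ?")) {
                ps.setString(1, "example-experiment-id");   // hypothetical ID
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        System.out.println(rs.getString("STATE") + " at "
                                + rs.getTimestamp("TIME_OF_STATE_CHANGE"));
                    }
                }
            } finally {
                conn.close();
            }
        }
    }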
diff --git a/modules/registry-refactoring/src/test/resources/replicacatalog-derby.sql b/modules/registry-refactoring/src/test/resources/replicacatalog-derby.sql
new file mode 100644
index 0000000..f510f36
--- /dev/null
+++ b/modules/registry-refactoring/src/test/resources/replicacatalog-derby.sql
@@ -0,0 +1,77 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+CREATE TABLE DATA_PRODUCT
+(
+        PRODUCT_URI VARCHAR (255),
+        GATEWAY_ID VARCHAR (255),
+        PRODUCT_NAME VARCHAR (255),
+        PRODUCT_DESCRIPTION VARCHAR (1024),
+        PARENT_PRODUCT_URI VARCHAR (255),
+        OWNER_NAME VARCHAR (255),
+        PRODUCT_SIZE INTEGER,
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        LAST_MODIFIED_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+        PRIMARY KEY (PRODUCT_URI)
+);
+
+CREATE TABLE DATA_REPLICA_LOCATION
+(
+        REPLICA_ID VARCHAR (255),
+        PRODUCT_URI VARCHAR (255) NOT NULL,
+        REPLICA_NAME VARCHAR (255),
+        REPLICA_DESCRIPTION VARCHAR (1024),
+        STORAGE_RESOURCE_ID VARCHAR (255),
+        FILE_PATH VARCHAR (4096),
+        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        LAST_MODIFIED_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+        VALID_UNTIL_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+        PRIMARY KEY (REPLICA_ID),
+        FOREIGN KEY (PRODUCT_URI) REFERENCES DATA_PRODUCT(PRODUCT_URI) ON DELETE CASCADE
+);
+
+CREATE TABLE DATA_PRODUCT_METADATA
+(
+        PRODUCT_URI VARCHAR(255),
+        METADATA_KEY VARCHAR(255),
+        METADATA_VALUE VARCHAR(2048),
+        PRIMARY KEY(PRODUCT_URI, METADATA_KEY),
+        FOREIGN KEY (PRODUCT_URI) REFERENCES DATA_PRODUCT(PRODUCT_URI) ON DELETE CASCADE
+);
+
+CREATE TABLE DATA_REPLICA_METADATA
+(
+        REPLICA_ID VARCHAR(255),
+        METADATA_KEY VARCHAR(255),
+        METADATA_VALUE VARCHAR(2048),
+        PRIMARY KEY(REPLICA_ID, METADATA_KEY),
+        FOREIGN KEY (REPLICA_ID) REFERENCES DATA_REPLICA_LOCATION(REPLICA_ID) ON DELETE CASCADE
+);
+
+
+CREATE TABLE CONFIGURATION
+(
+        CONFIG_KEY VARCHAR(255),
+        CONFIG_VAL VARCHAR(255),
+        PRIMARY KEY(CONFIG_KEY, CONFIG_VAL)
+);
+
+INSERT INTO CONFIGURATION (CONFIG_KEY, CONFIG_VAL) VALUES('data_catalog_version', '0.16');
\ No newline at end of file
diff --git a/modules/registry-refactoring/src/test/resources/workflowcatalog-derby.sql b/modules/registry-refactoring/src/test/resources/workflowcatalog-derby.sql
new file mode 100644
index 0000000..51a6ddf
--- /dev/null
+++ b/modules/registry-refactoring/src/test/resources/workflowcatalog-derby.sql
@@ -0,0 +1,128 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+CREATE TABLE WORKFLOW
+(
+        TEMPLATE_ID VARCHAR (255) NOT NULL,
+        WORKFLOW_NAME VARCHAR (255) NOT NULL,
+        CREATED_USER VARCHAR (255),
+        GATEWAY_ID VARCHAR (255),
+        GRAPH CLOB,
+        IMAGE BLOB,
+        CREATION_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        UPDATE_TIME timestamp DEFAULT '0000-00-00 00:00:00',
+        PRIMARY KEY (TEMPLATE_ID)
+);
+
+CREATE TABLE WORKFLOW_INPUT
+(
+         TEMPLATE_ID VARCHAR(255),
+         INPUT_KEY VARCHAR(255),
+         INPUT_VALUE CLOB,
+         DATA_TYPE VARCHAR(255),
+         METADATA VARCHAR(255),
+         APP_ARGUMENT VARCHAR(255),
+         STANDARD_INPUT SMALLINT,
+         USER_FRIENDLY_DESC VARCHAR(255),
+         INPUT_ORDER INTEGER,
+         IS_REQUIRED SMALLINT,
+         REQUIRED_TO_COMMANDLINE SMALLINT,
+         DATA_STAGED SMALLINT,
+         PRIMARY KEY(TEMPLATE_ID,INPUT_KEY),
+         FOREIGN KEY (TEMPLATE_ID) REFERENCES WORKFLOW(TEMPLATE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE WORKFLOW_OUTPUT
+(
+         TEMPLATE_ID VARCHAR(255),
+         OUTPUT_KEY VARCHAR(255),
+         OUTPUT_VALUE VARCHAR(255),
+         DATA_TYPE VARCHAR(255),
+         IS_REQUIRED SMALLINT,
+         REQUIRED_TO_COMMANDLINE SMALLINT,
+         DATA_MOVEMENT SMALLINT,
+         DATA_NAME_LOCATION VARCHAR(255),
+         SEARCH_QUERY VARCHAR(255),
+         APP_ARGUMENT VARCHAR(255),
+         OUTPUT_STREAMING SMALLINT,
+         PRIMARY KEY(TEMPLATE_ID,OUTPUT_KEY),
+         FOREIGN KEY (TEMPLATE_ID) REFERENCES WORKFLOW(TEMPLATE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE COMPONENT_STATUS
+(
+        STATUS_ID VARCHAR (255) NOT NULL,
+        TEMPLATE_ID VARCHAR (255) NOT NULL,
+        STATE VARCHAR(255),
+        REASON VARCHAR(255),
+        UPDATE_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        PRIMARY KEY (STATUS_ID),
+        FOREIGN KEY (TEMPLATE_ID) REFERENCES WORKFLOW(TEMPLATE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE WORKFLOW_STATUS
+(
+        STATUS_ID VARCHAR (255) NOT NULL,
+        TEMPLATE_ID VARCHAR (255) NOT NULL,
+        STATE VARCHAR(255),
+        REASON VARCHAR(255),
+        UPDATE_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        PRIMARY KEY (STATUS_ID, TEMPLATE_ID),
+        FOREIGN KEY (TEMPLATE_ID) REFERENCES WORKFLOW(TEMPLATE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE EDGE
+(
+        EDGE_ID VARCHAR (255) NOT NULL,
+        TEMPLATE_ID VARCHAR (255) NOT NULL,
+        NAME VARCHAR (255),
+        COMPONENT_STATUS_ID VARCHAR(255),
+        DESCRIPTION VARCHAR(500),
+        CREATED_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        PRIMARY KEY (EDGE_ID, TEMPLATE_ID),
+        FOREIGN KEY (TEMPLATE_ID) REFERENCES WORKFLOW(TEMPLATE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE PORT
+(
+        PORT_ID VARCHAR (255) NOT NULL,
+        TEMPLATE_ID VARCHAR (255) NOT NULL,
+        NAME VARCHAR (255),
+        COMPONENT_STATUS_ID VARCHAR(255),
+        DESCRIPTION VARCHAR(500),
+        CREATED_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        PRIMARY KEY (PORT_ID, TEMPLATE_ID),
+        FOREIGN KEY (TEMPLATE_ID) REFERENCES WORKFLOW(TEMPLATE_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE NODE
+(
+        NODE_ID VARCHAR (255) NOT NULL,
+        TEMPLATE_ID VARCHAR (255) NOT NULL,
+        NAME VARCHAR (255),
+        APPLICATION_ID VARCHAR (255),
+        APPLICATION_NAME VARCHAR (255),
+        COMPONENT_STATUS_ID VARCHAR(255),
+        DESCRIPTION VARCHAR(500),
+        CREATED_TIME timestamp DEFAULT CURRENT_TIMESTAMP,
+        PRIMARY KEY (NODE_ID, TEMPLATE_ID),
+        FOREIGN KEY (TEMPLATE_ID) REFERENCES WORKFLOW(TEMPLATE_ID) ON DELETE CASCADE
+);
\ No newline at end of file
diff --git a/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/impl/ApplicationDeploymentImpl.java b/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/impl/ApplicationDeploymentImpl.java
index 9a9d03c..86e1bdd 100644
--- a/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/impl/ApplicationDeploymentImpl.java
+++ b/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/impl/ApplicationDeploymentImpl.java
@@ -331,7 +331,7 @@ public class ApplicationDeploymentImpl implements ApplicationDeployment {
     }
 
     @Override
-    public List<ApplicationDeploymentDescription> getApplicationDeployements(Map<String, String> filters) throws AppCatalogException {
+    public List<ApplicationDeploymentDescription> getApplicationDeployments(Map<String, String> filters) throws AppCatalogException {
         List<ApplicationDeploymentDescription> deploymentDescriptions = new ArrayList<ApplicationDeploymentDescription>();
         try {
             AppDeploymentResource resource = new AppDeploymentResource();
@@ -396,7 +396,7 @@ public class ApplicationDeploymentImpl implements ApplicationDeployment {
     }
 
     @Override
-    public List<ApplicationDeploymentDescription> getAccessibleApplicationDeployements (String gatewayId, List<String> accessibleAppIds, List<String> accessibleComputeResourceIds) throws AppCatalogException {
+    public List<ApplicationDeploymentDescription> getAccessibleApplicationDeployments(String gatewayId, List<String> accessibleAppIds, List<String> accessibleComputeResourceIds) throws AppCatalogException {
         List<ApplicationDeploymentDescription> deploymentDescriptions = new ArrayList<ApplicationDeploymentDescription>();
         try {
             AppDeploymentResource resource = new AppDeploymentResource();
diff --git a/modules/registry/registry-core/src/test/java/org/apache/airavata/app/catalog/AppDeploymentTest.java b/modules/registry/registry-core/src/test/java/org/apache/airavata/app/catalog/AppDeploymentTest.java
index ae92c77..476db00 100644
--- a/modules/registry/registry-core/src/test/java/org/apache/airavata/app/catalog/AppDeploymentTest.java
+++ b/modules/registry/registry-core/src/test/java/org/apache/airavata/app/catalog/AppDeploymentTest.java
@@ -124,11 +124,11 @@ public class AppDeploymentTest {
 
         Map<String, String> moduleIdFilter = new HashMap<String, String>();
         moduleIdFilter.put(AppCatAbstractResource.ApplicationDeploymentConstants.APP_MODULE_ID, wrfModuleId);
-        List<ApplicationDeploymentDescription> applicationDeployements = appDep.getApplicationDeployements(moduleIdFilter);
+        List<ApplicationDeploymentDescription> applicationDeployements = appDep.getApplicationDeployments(moduleIdFilter);
         System.out.println("******** Size of App deployments for module *********** : " + applicationDeployements.size());
         Map<String, String> hostFilter = new HashMap<String, String>();
         hostFilter.put(AppCatAbstractResource.ApplicationDeploymentConstants.COMPUTE_HOST_ID, hostId);
-        List<ApplicationDeploymentDescription> applicationDeployementsForHost = appDep.getApplicationDeployements(hostFilter);
+        List<ApplicationDeploymentDescription> applicationDeployementsForHost = appDep.getApplicationDeployments(hostFilter);
         System.out.println("******** Size of App deployments for host *********** : " + applicationDeployementsForHost.size());
 
         List<String> allApplicationDeployementIds = appDep.getAllApplicationDeployementIds();
@@ -138,7 +138,7 @@ public class AppDeploymentTest {
         accessibleAppIds.add(wrfModuleId);
         List<String> accessibleComputeResourceIds = new ArrayList<>();
         accessibleComputeResourceIds.add(hostId);
-        List<ApplicationDeploymentDescription> allApplicationDeployements = appDep.getAccessibleApplicationDeployements(ServerSettings.getDefaultUserGateway(), accessibleAppIds, accessibleComputeResourceIds);
+        List<ApplicationDeploymentDescription> allApplicationDeployements = appDep.getAccessibleApplicationDeployments(ServerSettings.getDefaultUserGateway(), accessibleAppIds, accessibleComputeResourceIds);
         System.out.println("******** Size of all App deployments *********** : " + allApplicationDeployements.size());
 
         assertTrue("App interface saved successfully", app != null);
diff --git a/modules/registry/registry-cpi/src/main/java/org/apache/airavata/registry/cpi/ApplicationDeployment.java b/modules/registry/registry-cpi/src/main/java/org/apache/airavata/registry/cpi/ApplicationDeployment.java
index 4c455b7..1396490 100644
--- a/modules/registry/registry-cpi/src/main/java/org/apache/airavata/registry/cpi/ApplicationDeployment.java
+++ b/modules/registry/registry-cpi/src/main/java/org/apache/airavata/registry/cpi/ApplicationDeployment.java
@@ -51,7 +51,7 @@ public interface ApplicationDeployment {
      * @param filters map should be provided as the field name and it's value
      * @return list of application deployments
      */
-    List<ApplicationDeploymentDescription> getApplicationDeployements (Map<String, String> filters) throws AppCatalogException;
+    List<ApplicationDeploymentDescription> getApplicationDeployments(Map<String, String> filters) throws AppCatalogException;
 
     /**
      * This method will return a list of all application deployments
@@ -67,7 +67,7 @@ public interface ApplicationDeployment {
      * @param accessibleComputeResourceIds List of Accessible Compute Resource IDs
      * @return list of all application deployments accessible to the user
      */
-    List<ApplicationDeploymentDescription> getAccessibleApplicationDeployements (String gatewayId, List<String> accessibleAppIds, List<String> accessibleComputeResourceIds) throws AppCatalogException;
+    List<ApplicationDeploymentDescription> getAccessibleApplicationDeployments(String gatewayId, List<String> accessibleAppIds, List<String> accessibleComputeResourceIds) throws AppCatalogException;
 
     List<String> getAllApplicationDeployementIds () throws AppCatalogException;
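
For callers, only the method names change; the filter contract stays the same. A hedged sketch of the new call shape (the repository construction and module ID below are assumptions for illustration), matching how RegistryServerHandler uses it in the hunks that follow:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
    import org.apache.airavata.registry.core.repositories.appcatalog.ApplicationDeploymentRepository;
    import org.apache.airavata.registry.core.utils.DBConstants;

    public class DeploymentLookupSketch {
        public static void main(String[] args) throws Exception {
            ApplicationDeploymentRepository repository = new ApplicationDeploymentRepository();
            Map<String, String> filters = new HashMap<>();
            // Filter deployments by application module ID.
            filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, "example-module-id");
            List<ApplicationDeploymentDescription> deployments =
                    repository.getApplicationDeployments(filters);
            for (ApplicationDeploymentDescription d : deployments) {
                System.out.println(d.getAppDeploymentId());
            }
        }
    }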
 
diff --git a/modules/registry/registry-server/registry-api-service/src/main/java/org/apache/airavata/registry/api/service/handler/RegistryServerHandler.java b/modules/registry/registry-server/registry-api-service/src/main/java/org/apache/airavata/registry/api/service/handler/RegistryServerHandler.java
index 59ac946..10f29a0 100644
--- a/modules/registry/registry-server/registry-api-service/src/main/java/org/apache/airavata/registry/api/service/handler/RegistryServerHandler.java
+++ b/modules/registry/registry-server/registry-api-service/src/main/java/org/apache/airavata/registry/api/service/handler/RegistryServerHandler.java
@@ -1310,7 +1310,7 @@ public class RegistryServerHandler implements RegistryService.Iface {
             throw new RegistryServiceException("Gateway does not exist.Please provide a valid gateway id...");
         }
         try {
-            List<ApplicationDeploymentDescription> deployements = applicationDeploymentRepository.getAccessibleApplicationDeployements(gatewayId, accessibleAppDeploymentIds, accessibleComputeResourceIds);
+            List<ApplicationDeploymentDescription> deployements = applicationDeploymentRepository.getAccessibleApplicationDeployments(gatewayId, accessibleAppDeploymentIds, accessibleComputeResourceIds);
             logger.debug("Airavata retrieved application deployments for gateway id : " + gatewayId);
             return deployements;
         } catch (AppCatalogException e) {
@@ -1334,7 +1334,7 @@ public class RegistryServerHandler implements RegistryService.Iface {
             List<String> appDeployments = new ArrayList<>();
             Map<String, String> filters = new HashMap<>();
             filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, appModuleId);
-            List<ApplicationDeploymentDescription> applicationDeployments = applicationDeploymentRepository.getApplicationDeployements(filters);
+            List<ApplicationDeploymentDescription> applicationDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
             for (ApplicationDeploymentDescription description : applicationDeployments){
                 appDeployments.add(description.getAppDeploymentId());
             }
@@ -1353,7 +1353,7 @@ public class RegistryServerHandler implements RegistryService.Iface {
         try {
             Map<String, String> filters = new HashMap<>();
             filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, appModuleId);
-            List<ApplicationDeploymentDescription> applicationDeployments = applicationDeploymentRepository.getApplicationDeployements(filters);
+            List<ApplicationDeploymentDescription> applicationDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
             return applicationDeployments;
         } catch (AppCatalogException e) {
             logger.error(appModuleId, "Error while retrieving application deployments...", e);
@@ -1516,7 +1516,7 @@ public class RegistryServerHandler implements RegistryService.Iface {
                 for (String moduleId : applicationModules) {
                     filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, moduleId);
                     List<ApplicationDeploymentDescription> applicationDeployments =
-                            applicationDeploymentRepository.getApplicationDeployements(filters);
+                            applicationDeploymentRepository.getApplicationDeployments(filters);
                     for (ApplicationDeploymentDescription deploymentDescription : applicationDeployments) {
                         if (allComputeResources.get(deploymentDescription.getComputeHostId()) != null){
                             availableComputeResources.put(deploymentDescription.getComputeHostId(),

-- 
To stop receiving notification emails like this one, please contact
dimuthuupe@apache.org.