Posted to commits@airavata.apache.org by ch...@apache.org on 2014/02/17 20:29:58 UTC

[1/8] new datamodels for AIRAVATA-1017

Repository: airavata
Updated Branches:
  refs/heads/master 0cd438c72 -> a0c1cbde9


http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/resources/registry-mysql.sql
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/resources/registry-mysql.sql b/modules/registry/airavata-jpa-registry/src/main/resources/registry-mysql.sql
index d67174e..fcd3f76 100644
--- a/modules/registry/airavata-jpa-registry/src/main/resources/registry-mysql.sql
+++ b/modules/registry/airavata-jpa-registry/src/main/resources/registry-mysql.sql
@@ -283,7 +283,8 @@ CREATE TABLE STATUS
         PRIMARY KEY(STATUS_ID),
         FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
         FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE,
-        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE
+        FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE,
+        FOREIGN KEY (TRANSFER_ID) REFERENCES DATA_TRANSFER_DETAIL(TRANSFER_ID) ON DELETE CASCADE
 );
 
 CREATE TABLE CONFIG_DATA
@@ -343,7 +344,6 @@ CREATE TABLE ADVANCE_OUTPUT_DATA_HANDLING
        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
 );
 
-
 CREATE TABLE QOS_PARAMS
 (
         QOS_ID INTEGER NOT NULL AUTO_INCREMENT,

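Note on the STATUS hunk above: the added constraint implies a TRANSFER_ID column on STATUS and a DATA_TRANSFER_DETAIL table keyed by TRANSFER_ID, both introduced elsewhere in this commit but not shown in this excerpt. The following is a minimal illustrative sketch of the assumed referenced table and of what ON DELETE CASCADE buys you here; every column other than TRANSFER_ID is an assumption, not taken from this diff.

-- Illustrative only: minimal shape of the table the new STATUS foreign key assumes.
-- The real DATA_TRANSFER_DETAIL definition lives elsewhere in this commit and will
-- differ; only the TRANSFER_ID key is implied by the hunk above.
CREATE TABLE DATA_TRANSFER_DETAIL
(
        TRANSFER_ID VARCHAR(255) NOT NULL,                      -- referenced by STATUS(TRANSFER_ID)
        TASK_ID VARCHAR(255),                                   -- assumed link to TASK_DETAIL
        TRANSFER_DESCRIPTION VARCHAR(255),                      -- assumed payload column
        CREATION_TIME TIMESTAMP DEFAULT CURRENT_TIMESTAMP,      -- assumed
        PRIMARY KEY (TRANSFER_ID)
);

-- With ON DELETE CASCADE in place, removing a transfer row also removes the
-- dependent STATUS rows, the same way the existing EXPERIMENT, TASK_DETAIL and
-- WORKFLOW_NODE_DETAIL references behave ('transfer-001' is a made-up id):
DELETE FROM DATA_TRANSFER_DETAIL WHERE TRANSFER_ID = 'transfer-001';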

[3/8] new datamodels for AIRAVATA-1017

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobStatusResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobStatusResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobStatusResource.java
index 930b025..1ff7612 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobStatusResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobStatusResource.java
@@ -1,113 +1,113 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.GFac_Job_Data;
-import org.apache.airavata.persistance.registry.jpa.model.GFac_Job_Status;
-import org.apache.airavata.persistance.registry.jpa.model.Workflow_Data;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import java.sql.Timestamp;
-import java.util.List;
-
-public class GFacJobStatusResource extends AbstractResource {
-    private final static Logger logger = LoggerFactory.getLogger(GFacJobStatusResource.class);
-    private GFacJobDataResource gFacJobDataResource;
-    private String localJobID;
-    private Timestamp statusUpdateTime;
-    private String status;
-
-    public String getLocalJobID() {
-        return localJobID;
-    }
-
-    public Timestamp getStatusUpdateTime() {
-        return statusUpdateTime;
-    }
-
-    public String getStatus() {
-        return status;
-    }
-
-    public void setLocalJobID(String localJobID) {
-        this.localJobID = localJobID;
-    }
-
-    public void setStatusUpdateTime(Timestamp statusUpdateTime) {
-        this.statusUpdateTime = statusUpdateTime;
-    }
-
-    public void setStatus(String status) {
-        this.status = status;
-    }
-
-    public GFacJobDataResource getgFacJobDataResource() {
-        return gFacJobDataResource;
-    }
-
-    public void setgFacJobDataResource(GFacJobDataResource gFacJobDataResource) {
-        this.gFacJobDataResource = gFacJobDataResource;
-    }
-
-    @Override
-    public Resource create(ResourceType type) {
-        logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void remove(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public Resource get(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public List<Resource> get(ResourceType type) {
-        logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        GFac_Job_Status gFacJobStatus = new GFac_Job_Status();
-        GFac_Job_Data gFacJobData = em.find(GFac_Job_Data.class, localJobID);
-        gFacJobStatus.setgFac_job_data(gFacJobData);
-        gFacJobStatus.setLocal_Job_ID(localJobID);
-        gFacJobStatus.setStatus_update_time(statusUpdateTime);
-        gFacJobStatus.setStatus(status);
-        em.persist(gFacJobStatus);
-        em.getTransaction().commit();
-        em.close();
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.GFac_Job_Data;
+//import org.apache.airavata.persistance.registry.jpa.model.GFac_Job_Status;
+//import org.apache.airavata.persistance.registry.jpa.model.Workflow_Data;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import java.sql.Timestamp;
+//import java.util.List;
+//
+//public class GFacJobStatusResource extends AbstractResource {
+//    private final static Logger logger = LoggerFactory.getLogger(GFacJobStatusResource.class);
+//    private GFacJobDataResource gFacJobDataResource;
+//    private String localJobID;
+//    private Timestamp statusUpdateTime;
+//    private String status;
+//
+//    public String getLocalJobID() {
+//        return localJobID;
+//    }
+//
+//    public Timestamp getStatusUpdateTime() {
+//        return statusUpdateTime;
+//    }
+//
+//    public String getStatus() {
+//        return status;
+//    }
+//
+//    public void setLocalJobID(String localJobID) {
+//        this.localJobID = localJobID;
+//    }
+//
+//    public void setStatusUpdateTime(Timestamp statusUpdateTime) {
+//        this.statusUpdateTime = statusUpdateTime;
+//    }
+//
+//    public void setStatus(String status) {
+//        this.status = status;
+//    }
+//
+//    public GFacJobDataResource getgFacJobDataResource() {
+//        return gFacJobDataResource;
+//    }
+//
+//    public void setgFacJobDataResource(GFacJobDataResource gFacJobDataResource) {
+//        this.gFacJobDataResource = gFacJobDataResource;
+//    }
+//
+//    @Override
+//    public Resource create(ResourceType type) {
+//        logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public void remove(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public Resource get(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public List<Resource> get(ResourceType type) {
+//        logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        GFac_Job_Status gFacJobStatus = new GFac_Job_Status();
+//        GFac_Job_Data gFacJobData = em.find(GFac_Job_Data.class, localJobID);
+//        gFacJobStatus.setgFac_job_data(gFacJobData);
+//        gFacJobStatus.setLocal_Job_ID(localJobID);
+//        gFacJobStatus.setStatus_update_time(statusUpdateTime);
+//        gFacJobStatus.setStatus(status);
+//        em.persist(gFacJobStatus);
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GatewayResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GatewayResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GatewayResource.java
index 5e030fb..ec18eb1 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GatewayResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GatewayResource.java
@@ -124,18 +124,18 @@ public class GatewayResource extends AbstractResource {
                         new ApplicationDescriptorResource();
                 applicationDescriptorResource.setGatewayName(gatewayName);
                 return applicationDescriptorResource;
-            case EXPERIMENT_METADATA:
-                ExperimentMetadataResource metadataResource =new ExperimentMetadataResource();
-                metadataResource.setGateway(this);
-                return metadataResource;
+//            case EXPERIMENT_METADATA:
+//                ExperimentMetadataResource metadataResource =new ExperimentMetadataResource();
+//                metadataResource.setGateway(this);
+//                return metadataResource;
             case GATEWAY_WORKER:
                 WorkerResource workerResource = new WorkerResource();
                 workerResource.setGateway(this);
                 return workerResource;
             case ORCHESTRATOR:
-                OrchestratorDataResource orchestratorDataResource = new OrchestratorDataResource();
-                orchestratorDataResource.setGateway(this);
-                return orchestratorDataResource;
+//                OrchestratorDataResource orchestratorDataResource = new OrchestratorDataResource();
+//                orchestratorDataResource.setGateway(this);
+//                return orchestratorDataResource;
             default:
                 logger.error("Unsupported resource type for gateway resource.", new IllegalArgumentException());
                 throw new IllegalArgumentException("Unsupported resource type for gateway resource.");
@@ -255,16 +255,16 @@ public class GatewayResource extends AbstractResource {
                 em.close();
                 return hostDescriptorResource;
             case EXPERIMENT_METADATA:
-                generator = new QueryGenerator(EXPERIMENT_METADATA);
-                generator.setParameter(ExperimentMetadataConstants.EXPERIMENT_ID, name);
-//                generator.setParameter(ExperimentMetadataConstants.GATEWAY_NAME, gatewayName);
-                q = generator.selectQuery(em);
-                Experiment_Metadata experiment = (Experiment_Metadata)q.getSingleResult();
-                ExperimentMetadataResource experimentResource =
-                        (ExperimentMetadataResource)Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
-                em.getTransaction().commit();
-                em.close();
-                return experimentResource;
+//                generator = new QueryGenerator(EXPERIMENT_METADATA);
+//                generator.setParameter(ExperimentMetadataConstants.EXPERIMENT_ID, name);
+////                generator.setParameter(ExperimentMetadataConstants.GATEWAY_NAME, gatewayName);
+//                q = generator.selectQuery(em);
+//                Experiment_Metadata experiment = (Experiment_Metadata)q.getSingleResult();
+//                ExperimentMetadataResource experimentResource =
+//                        (ExperimentMetadataResource)Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
+//                em.getTransaction().commit();
+//                em.close();
+//                return experimentResource;
             case SERVICE_DESCRIPTOR:
                 generator = new QueryGenerator(SERVICE_DESCRIPTOR);
                 generator.setParameter(ServiceDescriptorConstants.SERVICE_DESC_ID, name);
@@ -288,15 +288,15 @@ public class GatewayResource extends AbstractResource {
                 em.close();
                 return applicationDescriptorResource;
             case ORCHESTRATOR:
-                generator = new QueryGenerator(ORCHESTRATOR);
-                generator.setParameter(OrchestratorDataConstants.EXPERIMENT_ID, name);
-                q = generator.selectQuery(em);
-                Orchestrator orchData = (Orchestrator) q.getSingleResult();
-                OrchestratorDataResource orchestratorDataResource =
-                        (OrchestratorDataResource)Utils.getResource(ResourceType.ORCHESTRATOR, orchData);
-                em.getTransaction().commit();
-                em.close();
-                return orchestratorDataResource;
+//                generator = new QueryGenerator(ORCHESTRATOR);
+//                generator.setParameter(OrchestratorDataConstants.EXPERIMENT_ID, name);
+//                q = generator.selectQuery(em);
+//                Orchestrator orchData = (Orchestrator) q.getSingleResult();
+//                OrchestratorDataResource orchestratorDataResource =
+//                        (OrchestratorDataResource)Utils.getResource(ResourceType.ORCHESTRATOR, orchData);
+//                em.getTransaction().commit();
+//                em.close();
+//                return orchestratorDataResource;
             default:
                 em.getTransaction().commit();
                 em.close();
@@ -405,20 +405,20 @@ public class GatewayResource extends AbstractResource {
                     }
                 }
                 break;
-            case EXPERIMENT_METADATA:
-                generator = new QueryGenerator(EXPERIMENT_METADATA);
-                generator.setParameter(ExperimentMetadataConstants.GATEWAY_NAME, gatewayName);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Experiment_Metadata experiment = (Experiment_Metadata) result;
-                        ExperimentMetadataResource experimentResource =
-                                (ExperimentMetadataResource)Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
-                        resourceList.add(experimentResource);
-                    }
-                }
-                break;
+//            case EXPERIMENT_METADATA:
+//                generator = new QueryGenerator(EXPERIMENT_METADATA);
+//                generator.setParameter(ExperimentMetadataConstants.GATEWAY_NAME, gatewayName);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Experiment_Metadata experiment = (Experiment_Metadata) result;
+//                        ExperimentMetadataResource experimentResource =
+//                                (ExperimentMetadataResource)Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
+//                        resourceList.add(experimentResource);
+//                    }
+//                }
+//                break;
             case USER:
 		        generator = new QueryGenerator(USERS);
 		        q = generator.selectQuery(em);
@@ -429,16 +429,16 @@ public class GatewayResource extends AbstractResource {
 		        	resourceList.add(userResource);
 		        }
 		        break;
-            case ORCHESTRATOR:
-                generator = new QueryGenerator(ORCHESTRATOR);
-                q = generator.selectQuery(em);
-                for (Object o : q.getResultList()) {
-                    Orchestrator orchData = (Orchestrator) o;
-                    OrchestratorDataResource orchestratorDataResource =
-                            (OrchestratorDataResource)Utils.getResource(ResourceType.ORCHESTRATOR, orchData);
-                    resourceList.add(orchestratorDataResource);
-                }
-                break;
+//            case ORCHESTRATOR:
+//                generator = new QueryGenerator(ORCHESTRATOR);
+//                q = generator.selectQuery(em);
+//                for (Object o : q.getResultList()) {
+//                    Orchestrator orchData = (Orchestrator) o;
+//                    OrchestratorDataResource orchestratorDataResource =
+//                            (OrchestratorDataResource)Utils.getResource(ResourceType.ORCHESTRATOR, orchData);
+//                    resourceList.add(orchestratorDataResource);
+//                }
+//                break;
             default:
                 em.getTransaction().commit();
                 em.close();
@@ -511,16 +511,16 @@ public class GatewayResource extends AbstractResource {
                 Application_Descriptor existingAppDesc = em.find(Application_Descriptor.class, new Application_Descriptor_PK(gatewayName, name.toString()));
                 em.close();
                 return existingAppDesc != null;
-            case EXPERIMENT_METADATA:
-                em = ResourceUtils.getEntityManager();
-                Experiment_Metadata existingExp = em.find(Experiment_Metadata.class, name.toString());
-                em.close();
-                return existingExp != null;
-            case ORCHESTRATOR:
-                em = ResourceUtils.getEntityManager();
-                Orchestrator existingOrchestrator = em.find(Orchestrator.class, name.toString());
-                em.close();
-                return existingOrchestrator != null;   
+//            case EXPERIMENT_METADATA:
+//                em = ResourceUtils.getEntityManager();
+//                Experiment_Metadata existingExp = em.find(Experiment_Metadata.class, name.toString());
+//                em.close();
+//                return existingExp != null;
+//            case ORCHESTRATOR:
+//                em = ResourceUtils.getEntityManager();
+//                Orchestrator existingOrchestrator = em.find(Orchestrator.class, name.toString());
+//                em.close();
+//                return existingOrchestrator != null;
             default:
                 logger.error("Unsupported resource type for gateway resource.", new IllegalArgumentException());
                 throw new IllegalArgumentException("Unsupported resource type for gateway resource.");
@@ -776,16 +776,16 @@ public class GatewayResource extends AbstractResource {
     	remove(ResourceType.PUBLISHED_WORKFLOW, workflowTemplateName);
     }
     
-    public OrchestratorDataResource createOrchestratorData(String experimentID){
-    	OrchestratorDataResource dataResource = (OrchestratorDataResource)create(ResourceType.ORCHESTRATOR);
-    	dataResource.setExperimentID(experimentID);
-    	return dataResource;
-    }
-
-    public ExperimentMetadataResource createBasicMetada (String experimentID){
-        ExperimentMetadataResource metadataResource = (ExperimentMetadataResource)create(ResourceType.EXPERIMENT_METADATA);
-        metadataResource.setExpID(experimentID);
-        return metadataResource;
-    }
+//    public OrchestratorDataResource createOrchestratorData(String experimentID){
+//    	OrchestratorDataResource dataResource = (OrchestratorDataResource)create(ResourceType.ORCHESTRATOR);
+//    	dataResource.setExperimentID(experimentID);
+//    	return dataResource;
+//    }
+
+//    public ExperimentMetadataResource createBasicMetada (String experimentID){
+//        ExperimentMetadataResource metadataResource = (ExperimentMetadataResource)create(ResourceType.EXPERIMENT_METADATA);
+//        metadataResource.setExpID(experimentID);
+//        return metadataResource;
+//    }
 }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GramDataResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GramDataResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GramDataResource.java
index 238f7ab..81a6ac7 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GramDataResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GramDataResource.java
@@ -1,133 +1,133 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.Gram_Data;
-import org.apache.airavata.persistance.registry.jpa.model.Gram_DataPK;
-import org.apache.airavata.persistance.registry.jpa.model.Node_Data;
-import org.apache.airavata.persistance.registry.jpa.model.Workflow_Data;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import java.util.List;
-
-public class GramDataResource extends AbstractResource{
-    private final static Logger logger = LoggerFactory.getLogger(GramDataResource.class);
-    private WorkflowDataResource workflowDataResource;
-    private String nodeID;
-    private String rsl;
-    private String invokedHost;
-    private String localJobID;
-
-    public String getNodeID() {
-        return nodeID;
-    }
-
-    public String getRsl() {
-        return rsl;
-    }
-
-    public String getInvokedHost() {
-        return invokedHost;
-    }
-
-    public String getLocalJobID() {
-        return localJobID;
-    }
-
-    public WorkflowDataResource getWorkflowDataResource() {
-        return workflowDataResource;
-    }
-
-    public void setWorkflowDataResource(WorkflowDataResource workflowDataResource) {
-        this.workflowDataResource = workflowDataResource;
-    }
-
-    public void setNodeID(String nodeID) {
-        this.nodeID = nodeID;
-    }
-
-    public void setRsl(String rsl) {
-        this.rsl = rsl;
-    }
-
-    public void setInvokedHost(String invokedHost) {
-        this.invokedHost = invokedHost;
-    }
-
-    public void setLocalJobID(String localJobID) {
-        this.localJobID = localJobID;
-    }
-
-    public Resource create(ResourceType type) {
-        logger.error("Unsupported resource type for Gram data resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    public void remove(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for Gram data resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    public Resource get(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for Gram data resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    public List<Resource> get(ResourceType type) {
-        logger.error("Unsupported resource type for Gram data resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        Gram_Data existingGramData = em.find(Gram_Data.class, new Gram_DataPK(workflowDataResource.getWorkflowInstanceID(), nodeID));
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Gram_Data gramData = new Gram_Data();
-        gramData.setNode_id(nodeID);
-        Workflow_Data workflow_data = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
-        gramData.setWorkflow_Data(workflow_data);
-        gramData.setNode_id(nodeID);
-        gramData.setInvoked_host(invokedHost);
-        gramData.setLocal_Job_ID(localJobID);
-        byte[] bytes = rsl.getBytes();
-        gramData.setRsl(bytes);
-        if(existingGramData != null){
-            existingGramData.setInvoked_host(invokedHost);
-            existingGramData.setLocal_Job_ID(localJobID);
-            existingGramData.setRsl(bytes);
-            gramData = em.merge(existingGramData);
-        }  else {
-            em.persist(gramData);
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.Gram_Data;
+//import org.apache.airavata.persistance.registry.jpa.model.Gram_DataPK;
+//import org.apache.airavata.persistance.registry.jpa.model.Node_Data;
+//import org.apache.airavata.persistance.registry.jpa.model.Workflow_Data;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import java.util.List;
+//
+//public class GramDataResource extends AbstractResource{
+//    private final static Logger logger = LoggerFactory.getLogger(GramDataResource.class);
+//    private WorkflowDataResource workflowDataResource;
+//    private String nodeID;
+//    private String rsl;
+//    private String invokedHost;
+//    private String localJobID;
+//
+//    public String getNodeID() {
+//        return nodeID;
+//    }
+//
+//    public String getRsl() {
+//        return rsl;
+//    }
+//
+//    public String getInvokedHost() {
+//        return invokedHost;
+//    }
+//
+//    public String getLocalJobID() {
+//        return localJobID;
+//    }
+//
+//    public WorkflowDataResource getWorkflowDataResource() {
+//        return workflowDataResource;
+//    }
+//
+//    public void setWorkflowDataResource(WorkflowDataResource workflowDataResource) {
+//        this.workflowDataResource = workflowDataResource;
+//    }
+//
+//    public void setNodeID(String nodeID) {
+//        this.nodeID = nodeID;
+//    }
+//
+//    public void setRsl(String rsl) {
+//        this.rsl = rsl;
+//    }
+//
+//    public void setInvokedHost(String invokedHost) {
+//        this.invokedHost = invokedHost;
+//    }
+//
+//    public void setLocalJobID(String localJobID) {
+//        this.localJobID = localJobID;
+//    }
+//
+//    public Resource create(ResourceType type) {
+//        logger.error("Unsupported resource type for Gram data resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void remove(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for Gram data resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public Resource get(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for Gram data resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public List<Resource> get(ResourceType type) {
+//        logger.error("Unsupported resource type for Gram data resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        Gram_Data existingGramData = em.find(Gram_Data.class, new Gram_DataPK(workflowDataResource.getWorkflowInstanceID(), nodeID));
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Gram_Data gramData = new Gram_Data();
+//        gramData.setNode_id(nodeID);
+//        Workflow_Data workflow_data = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
+//        gramData.setWorkflow_Data(workflow_data);
+//        gramData.setNode_id(nodeID);
+//        gramData.setInvoked_host(invokedHost);
+//        gramData.setLocal_Job_ID(localJobID);
+//        byte[] bytes = rsl.getBytes();
+//        gramData.setRsl(bytes);
+//        if(existingGramData != null){
+//            existingGramData.setInvoked_host(invokedHost);
+//            existingGramData.setLocal_Job_ID(localJobID);
+//            existingGramData.setRsl(bytes);
+//            gramData = em.merge(existingGramData);
+//        }  else {
+//            em.persist(gramData);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/NodeDataResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/NodeDataResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/NodeDataResource.java
index 8b5881e..efbfcb6 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/NodeDataResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/NodeDataResource.java
@@ -1,274 +1,274 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.*;
-import org.apache.airavata.persistance.registry.jpa.utils.QueryGenerator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.List;
-
-public class NodeDataResource extends AbstractResource{
-    private final static Logger logger = LoggerFactory.getLogger(NodeDataResource.class);
-    private WorkflowDataResource workflowDataResource;
-    private String nodeID;
-    private String nodeType;
-    private String inputs;
-    private String outputs;
-    private String status;
-    private Timestamp startTime;
-    private Timestamp lastUpdateTime;
-    private int executionIndex;
-
-    public WorkflowDataResource getWorkflowDataResource() {
-        return workflowDataResource;
-    }
-
-    public String getNodeID() {
-        return nodeID;
-    }
-
-    public String getNodeType() {
-        return nodeType;
-    }
-
-    public String getInputs() {
-        return inputs;
-    }
-
-    public String getOutputs() {
-        return outputs;
-    }
-
-    public String getStatus() {
-        return status;
-    }
-
-    public Timestamp getStartTime() {
-        return startTime;
-    }
-
-    public Timestamp getLastUpdateTime() {
-        return lastUpdateTime;
-    }
-
-    public void setWorkflowDataResource(WorkflowDataResource workflowDataResource) {
-        this.workflowDataResource = workflowDataResource;
-    }
-
-    public void setNodeID(String nodeID) {
-        this.nodeID = nodeID;
-    }
-
-    public void setNodeType(String nodeType) {
-        this.nodeType = nodeType;
-    }
-
-    public void setInputs(String inputs) {
-        this.inputs = inputs;
-    }
-
-    public void setOutputs(String outputs) {
-        this.outputs = outputs;
-    }
-
-    public void setStatus(String status) {
-        this.status = status;
-    }
-
-    public void setStartTime(Timestamp startTime) {
-        this.startTime = startTime;
-    }
-
-    public void setLastUpdateTime(Timestamp lastUpdateTime) {
-        this.lastUpdateTime = lastUpdateTime;
-    }
-
-    public Resource create(ResourceType type) {
-        switch (type){
-            case GFAC_JOB_DATA:
-                GFacJobDataResource gFacJobDataResource = new GFacJobDataResource();
-                gFacJobDataResource.setWorkflowDataResource(workflowDataResource);
-                gFacJobDataResource.setNodeID(nodeID);
-                return gFacJobDataResource;
-            default:
-                logger.error("Unsupported resource type for node data resource.", new IllegalArgumentException());
-                throw new IllegalArgumentException("Unsupported resource type for node data resource.");
-        }
-    }
-
-    public void remove(ResourceType type, Object name) {
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Query q;
-        QueryGenerator generator;
-        switch (type){
-            case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
-                q = generator.deleteQuery(em);
-                q.executeUpdate();
-                break;
-            default:
-                logger.error("Unsupported resource type for node data resource.", new IllegalArgumentException());
-                break;
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-
-    public Resource get(ResourceType type, Object name) {
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        QueryGenerator generator;
-        Query q;
-        switch (type) {
-            case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
-                q = generator.selectQuery(em);
-                GFac_Job_Data gFac_job_data = (GFac_Job_Data)q.getSingleResult();
-                GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFac_job_data);
-                em.getTransaction().commit();
-                em.close();
-                return gFacJobDataResource;
-            default:
-                em.getTransaction().commit();
-                em.close();
-                logger.error("Unsupported resource type for node data resource.", new IllegalArgumentException());
-                throw new IllegalArgumentException("Unsupported resource type for node data resource.");
-        }
-    }
-
-    public List<Resource> getGFacJobs(){
-    	return get(ResourceType.GFAC_JOB_DATA);
-    }
-    public List<Resource> get(ResourceType type) {
-        List<Resource> resourceList = new ArrayList<Resource>();
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Query q;
-        QueryGenerator generator;
-        List<?> results;
-        switch (type){
-            case EXECUTION_ERROR:
-                generator = new QueryGenerator(EXECUTION_ERROR);
-                generator.setParameter(ExecutionErrorConstants.NODE_ID, nodeID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Execution_Error execution_error = (Execution_Error)result;
-                        ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, execution_error);
-                        resourceList.add(executionErrorResource);
-                    }
-                }
-                break;
-            case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.EXPERIMENT_ID, workflowDataResource.getExperimentID());
-                generator.setParameter(GFacJobDataConstants.WORKFLOW_INSTANCE_ID, workflowDataResource.getWorkflowInstanceID());
-                generator.setParameter(GFacJobDataConstants.NODE_ID, nodeID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        GFac_Job_Data gFac_job_data = (GFac_Job_Data)result;
-                        GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFac_job_data);
-                        resourceList.add(gFacJobDataResource);
-                    }
-                }
-                break;
-            default:
-                em.getTransaction().commit();
-                em.close();
-                logger.error("Unsupported resource type for node data resource.", new IllegalArgumentException());
-                throw new IllegalArgumentException("Unsupported resource type for node data resource.");
-        }
-        em.getTransaction().commit();
-        em.close();
-        return resourceList;
-    }
-
-    public void save() {
-        if(lastUpdateTime == null){
-            java.util.Date date= new java.util.Date();
-            lastUpdateTime = new Timestamp(date.getTime());
-        }
-        EntityManager em = ResourceUtils.getEntityManager();
-        Node_Data existingNodeData = em.find(Node_Data.class, new Node_DataPK(workflowDataResource.getWorkflowInstanceID(), nodeID, executionIndex));
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Node_Data nodeData = new Node_Data();
-        nodeData.setNode_id(nodeID);
-        Workflow_Data workflow_data = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
-        nodeData.setWorkflow_Data(workflow_data);
-        byte[] inputsByte = null;
-        if (inputs!=null) {
-			inputsByte = inputs.getBytes();
-			nodeData.setInputs(inputsByte);
-		}
-		byte[] outputsByte = null;
-        if (outputs!=null) {
-			outputsByte = outputs.getBytes();
-			nodeData.setOutputs(outputsByte);
-		}
-		nodeData.setNode_type(nodeType);
-        nodeData.setLast_update_time(lastUpdateTime);
-        nodeData.setStart_time(startTime);
-        nodeData.setStatus(status);
-        nodeData.setExecution_index(executionIndex);
-        if(existingNodeData != null){
-            existingNodeData.setInputs(inputsByte);
-            existingNodeData.setOutputs(outputsByte);
-            existingNodeData.setLast_update_time(lastUpdateTime);
-            existingNodeData.setNode_type(nodeType);
-            existingNodeData.setStart_time(startTime);
-            existingNodeData.setStatus(status);
-            existingNodeData.setExecution_index(executionIndex);
-            nodeData = em.merge(existingNodeData);
-        }  else {
-            em.persist(nodeData);
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-
-    public int getExecutionIndex() {
-        return executionIndex;
-    }
-
-    public void setExecutionIndex(int executionIndex) {
-        this.executionIndex = executionIndex;
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.*;
+//import org.apache.airavata.persistance.registry.jpa.utils.QueryGenerator;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import javax.persistence.Query;
+//import java.sql.Timestamp;
+//import java.util.ArrayList;
+//import java.util.List;
+//
+//public class NodeDataResource extends AbstractResource{
+//    private final static Logger logger = LoggerFactory.getLogger(NodeDataResource.class);
+//    private WorkflowDataResource workflowDataResource;
+//    private String nodeID;
+//    private String nodeType;
+//    private String inputs;
+//    private String outputs;
+//    private String status;
+//    private Timestamp startTime;
+//    private Timestamp lastUpdateTime;
+//    private int executionIndex;
+//
+//    public WorkflowDataResource getWorkflowDataResource() {
+//        return workflowDataResource;
+//    }
+//
+//    public String getNodeID() {
+//        return nodeID;
+//    }
+//
+//    public String getNodeType() {
+//        return nodeType;
+//    }
+//
+//    public String getInputs() {
+//        return inputs;
+//    }
+//
+//    public String getOutputs() {
+//        return outputs;
+//    }
+//
+//    public String getStatus() {
+//        return status;
+//    }
+//
+//    public Timestamp getStartTime() {
+//        return startTime;
+//    }
+//
+//    public Timestamp getLastUpdateTime() {
+//        return lastUpdateTime;
+//    }
+//
+//    public void setWorkflowDataResource(WorkflowDataResource workflowDataResource) {
+//        this.workflowDataResource = workflowDataResource;
+//    }
+//
+//    public void setNodeID(String nodeID) {
+//        this.nodeID = nodeID;
+//    }
+//
+//    public void setNodeType(String nodeType) {
+//        this.nodeType = nodeType;
+//    }
+//
+//    public void setInputs(String inputs) {
+//        this.inputs = inputs;
+//    }
+//
+//    public void setOutputs(String outputs) {
+//        this.outputs = outputs;
+//    }
+//
+//    public void setStatus(String status) {
+//        this.status = status;
+//    }
+//
+//    public void setStartTime(Timestamp startTime) {
+//        this.startTime = startTime;
+//    }
+//
+//    public void setLastUpdateTime(Timestamp lastUpdateTime) {
+//        this.lastUpdateTime = lastUpdateTime;
+//    }
+//
+//    public Resource create(ResourceType type) {
+//        switch (type){
+//            case GFAC_JOB_DATA:
+//                GFacJobDataResource gFacJobDataResource = new GFacJobDataResource();
+//                gFacJobDataResource.setWorkflowDataResource(workflowDataResource);
+//                gFacJobDataResource.setNodeID(nodeID);
+//                return gFacJobDataResource;
+//            default:
+//                logger.error("Unsupported resource type for node data resource.", new IllegalArgumentException());
+//                throw new IllegalArgumentException("Unsupported resource type for node data resource.");
+//        }
+//    }
+//
+//    public void remove(ResourceType type, Object name) {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Query q;
+//        QueryGenerator generator;
+//        switch (type){
+//            case GFAC_JOB_DATA:
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
+//                q = generator.deleteQuery(em);
+//                q.executeUpdate();
+//                break;
+//            default:
+//                logger.error("Unsupported resource type for node data resource.", new IllegalArgumentException());
+//                break;
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//
+//    public Resource get(ResourceType type, Object name) {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        QueryGenerator generator;
+//        Query q;
+//        switch (type) {
+//            case GFAC_JOB_DATA:
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
+//                q = generator.selectQuery(em);
+//                GFac_Job_Data gFac_job_data = (GFac_Job_Data)q.getSingleResult();
+//                GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFac_job_data);
+//                em.getTransaction().commit();
+//                em.close();
+//                return gFacJobDataResource;
+//            default:
+//                em.getTransaction().commit();
+//                em.close();
+//                logger.error("Unsupported resource type for node data resource.", new IllegalArgumentException());
+//                throw new IllegalArgumentException("Unsupported resource type for node data resource.");
+//        }
+//    }
+//
+//    public List<Resource> getGFacJobs(){
+//    	return get(ResourceType.GFAC_JOB_DATA);
+//    }
+//    public List<Resource> get(ResourceType type) {
+//        List<Resource> resourceList = new ArrayList<Resource>();
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Query q;
+//        QueryGenerator generator;
+//        List<?> results;
+//        switch (type){
+//            case EXECUTION_ERROR:
+//                generator = new QueryGenerator(EXECUTION_ERROR);
+//                generator.setParameter(ExecutionErrorConstants.NODE_ID, nodeID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Execution_Error execution_error = (Execution_Error)result;
+//                        ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, execution_error);
+//                        resourceList.add(executionErrorResource);
+//                    }
+//                }
+//                break;
+//            case GFAC_JOB_DATA:
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.EXPERIMENT_ID, workflowDataResource.getExperimentID());
+//                generator.setParameter(GFacJobDataConstants.WORKFLOW_INSTANCE_ID, workflowDataResource.getWorkflowInstanceID());
+//                generator.setParameter(GFacJobDataConstants.NODE_ID, nodeID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        GFac_Job_Data gFac_job_data = (GFac_Job_Data)result;
+//                        GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFac_job_data);
+//                        resourceList.add(gFacJobDataResource);
+//                    }
+//                }
+//                break;
+//            default:
+//                em.getTransaction().commit();
+//                em.close();
+//                logger.error("Unsupported resource type for node data resource.", new IllegalArgumentException());
+//                throw new IllegalArgumentException("Unsupported resource type for node data resource.");
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//        return resourceList;
+//    }
+//
+//    public void save() {
+//        if(lastUpdateTime == null){
+//            java.util.Date date= new java.util.Date();
+//            lastUpdateTime = new Timestamp(date.getTime());
+//        }
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        Node_Data existingNodeData = em.find(Node_Data.class, new Node_DataPK(workflowDataResource.getWorkflowInstanceID(), nodeID, executionIndex));
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Node_Data nodeData = new Node_Data();
+//        nodeData.setNode_id(nodeID);
+//        Workflow_Data workflow_data = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
+//        nodeData.setWorkflow_Data(workflow_data);
+//        byte[] inputsByte = null;
+//        if (inputs!=null) {
+//			inputsByte = inputs.getBytes();
+//			nodeData.setInputs(inputsByte);
+//		}
+//		byte[] outputsByte = null;
+//        if (outputs!=null) {
+//			outputsByte = outputs.getBytes();
+//			nodeData.setOutputs(outputsByte);
+//		}
+//		nodeData.setNode_type(nodeType);
+//        nodeData.setLast_update_time(lastUpdateTime);
+//        nodeData.setStart_time(startTime);
+//        nodeData.setStatus(status);
+//        nodeData.setExecution_index(executionIndex);
+//        if(existingNodeData != null){
+//            existingNodeData.setInputs(inputsByte);
+//            existingNodeData.setOutputs(outputsByte);
+//            existingNodeData.setLast_update_time(lastUpdateTime);
+//            existingNodeData.setNode_type(nodeType);
+//            existingNodeData.setStart_time(startTime);
+//            existingNodeData.setStatus(status);
+//            existingNodeData.setExecution_index(executionIndex);
+//            nodeData = em.merge(existingNodeData);
+//        }  else {
+//            em.persist(nodeData);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//
+//    public int getExecutionIndex() {
+//        return executionIndex;
+//    }
+//
+//    public void setExecutionIndex(int executionIndex) {
+//        this.executionIndex = executionIndex;
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/OrchestratorDataResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/OrchestratorDataResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/OrchestratorDataResource.java
index 54dc877..c23ca2f 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/OrchestratorDataResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/OrchestratorDataResource.java
@@ -1,189 +1,189 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import java.sql.Timestamp;
-import java.util.List;
-
-import javax.persistence.EntityManager;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.Orchestrator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class OrchestratorDataResource extends AbstractResource {
-   
-	private final static Logger log = LoggerFactory.getLogger(OrchestratorDataResource.class);
-	private String experimentID;
-	private String userName;
-	private String applicationName;
-	private String status;
-	private String state;
-	private String gfacEPR;
-	private String jobRequest;
-	private GatewayResource gateway;
-    private Timestamp submittedTime;
-    private Timestamp statusUpdateTime;
-
-	public String getExperimentID() {
-		return experimentID;
-	}
-
-	public String getUserName() {
-		return userName;
-	}
-
-	public void setExperimentID(String experimentID) {
-		this.experimentID = experimentID;
-	}
-
-	public void setUserName(String userName) {
-		this.userName = userName;
-	}
-
-	public String getApplicationName() {
-		return applicationName;
-	}
-
-	public void setApplicationName(String applicationName) {
-		this.applicationName = applicationName;
-	}
-
-	public String getStatus() {
-		return status;
-	}
-
-	public void setStatus(String status) {
-		this.status = status;
-	}
-
-	public String getState() {
-		return state;
-	}
-
-	public void setState(String state) {
-		this.state = state;
-	}
-
-	public String getGfacEPR() {
-		return gfacEPR;
-	}
-
-	public void setGfacEPR(String gfacEPR) {
-		this.gfacEPR = gfacEPR;
-	}
-
-	public String getJobRequest() {
-		return jobRequest;
-	}
-
-	public void setJobRequest(String jobRequest) {
-		this.jobRequest = jobRequest;
-	}
-
-	public GatewayResource getGateway() {
-		return gateway;
-	}
-
-	public void setGateway(GatewayResource gateway) {
-		this.gateway = gateway;
-	}
-
-	public Timestamp getSubmittedTime() {
-		return submittedTime;
-	}
-
-	public void setSubmittedTime(Timestamp submittedTime) {
-		this.submittedTime = submittedTime;
-	}
-
-	public Timestamp getStatusUpdateTime() {
-		return statusUpdateTime;
-	}
-
-	public void setStatusUpdateTime(Timestamp statusUpdateTime) {
-		this.statusUpdateTime = statusUpdateTime;
-	}
-
-	@Override
-	public Resource create(ResourceType type) {
-        log.error("Unsupported resource type for orchestrator resource.", new IllegalArgumentException());
-        throw new IllegalArgumentException("Unsupported resource type for orchestrator resource.");
-    }
-
-	@Override
-	public void remove(ResourceType type, Object name) {
-		   log.error("Unsupported operation to remove orchestrator data.", new UnsupportedOperationException());
-	       throw new UnsupportedOperationException();
-	}
-
-	@Override
-	public Resource get(ResourceType type, Object name) {
-        log.error("Unsupported resource type for orchestrator data.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-	}
-
-	@Override
-	public List<Resource> get(ResourceType type) {
-        log.error("Unsupported resource type for orchestrator data.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-	}
-
-	@Override
-	public void save() {
-		EntityManager em = ResourceUtils.getEntityManager();
-		Orchestrator existingOrchestrator = em.find(Orchestrator.class,
-				experimentID);
-		em.close();
-		em = ResourceUtils.getEntityManager();
-		em.getTransaction().begin();
-		Orchestrator orchestrator = new Orchestrator();
-		orchestrator.setExperiment_ID(experimentID);
-		orchestrator.setUserName(userName);
-		orchestrator.setGfacEPR(gfacEPR);
-		orchestrator.setState(state);
-		orchestrator.setStatus(status);
-		orchestrator.setApplicationName(applicationName);
-		orchestrator.setJobRequest(jobRequest);
-		orchestrator.setSubmittedTime(submittedTime);
-		orchestrator.setStatusUpdateTime(statusUpdateTime);
-		if (existingOrchestrator != null) {
-			existingOrchestrator.setExperiment_ID(experimentID);
-			existingOrchestrator.setUserName(userName);
-			existingOrchestrator.setState(state);
-			existingOrchestrator.setStatus(status);
-			existingOrchestrator.setGfacEPR(gfacEPR);
-			existingOrchestrator.setApplicationName(applicationName);
-			existingOrchestrator.setJobRequest(jobRequest);
-			existingOrchestrator.setSubmittedTime(submittedTime);
-			existingOrchestrator.setStatusUpdateTime(statusUpdateTime);
-			orchestrator = em.merge(existingOrchestrator);
-		} else {
-			em.persist(orchestrator);
-		}
-		em.getTransaction().commit();
-		em.close();
-	}
-
-}
\ No newline at end of file
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import java.sql.Timestamp;
+//import java.util.List;
+//
+//import javax.persistence.EntityManager;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.Orchestrator;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//public class OrchestratorDataResource extends AbstractResource {
+//
+//	private final static Logger log = LoggerFactory.getLogger(OrchestratorDataResource.class);
+//	private String experimentID;
+//	private String userName;
+//	private String applicationName;
+//	private String status;
+//	private String state;
+//	private String gfacEPR;
+//	private String jobRequest;
+//	private GatewayResource gateway;
+//    private Timestamp submittedTime;
+//    private Timestamp statusUpdateTime;
+//
+//	public String getExperimentID() {
+//		return experimentID;
+//	}
+//
+//	public String getUserName() {
+//		return userName;
+//	}
+//
+//	public void setExperimentID(String experimentID) {
+//		this.experimentID = experimentID;
+//	}
+//
+//	public void setUserName(String userName) {
+//		this.userName = userName;
+//	}
+//
+//	public String getApplicationName() {
+//		return applicationName;
+//	}
+//
+//	public void setApplicationName(String applicationName) {
+//		this.applicationName = applicationName;
+//	}
+//
+//	public String getStatus() {
+//		return status;
+//	}
+//
+//	public void setStatus(String status) {
+//		this.status = status;
+//	}
+//
+//	public String getState() {
+//		return state;
+//	}
+//
+//	public void setState(String state) {
+//		this.state = state;
+//	}
+//
+//	public String getGfacEPR() {
+//		return gfacEPR;
+//	}
+//
+//	public void setGfacEPR(String gfacEPR) {
+//		this.gfacEPR = gfacEPR;
+//	}
+//
+//	public String getJobRequest() {
+//		return jobRequest;
+//	}
+//
+//	public void setJobRequest(String jobRequest) {
+//		this.jobRequest = jobRequest;
+//	}
+//
+//	public GatewayResource getGateway() {
+//		return gateway;
+//	}
+//
+//	public void setGateway(GatewayResource gateway) {
+//		this.gateway = gateway;
+//	}
+//
+//	public Timestamp getSubmittedTime() {
+//		return submittedTime;
+//	}
+//
+//	public void setSubmittedTime(Timestamp submittedTime) {
+//		this.submittedTime = submittedTime;
+//	}
+//
+//	public Timestamp getStatusUpdateTime() {
+//		return statusUpdateTime;
+//	}
+//
+//	public void setStatusUpdateTime(Timestamp statusUpdateTime) {
+//		this.statusUpdateTime = statusUpdateTime;
+//	}
+//
+//	@Override
+//	public Resource create(ResourceType type) {
+//        log.error("Unsupported resource type for orchestrator resource.", new IllegalArgumentException());
+//        throw new IllegalArgumentException("Unsupported resource type for orchestrator resource.");
+//    }
+//
+//	@Override
+//	public void remove(ResourceType type, Object name) {
+//		   log.error("Unsupported operation to remove orchestrator data.", new UnsupportedOperationException());
+//	       throw new UnsupportedOperationException();
+//	}
+//
+//	@Override
+//	public Resource get(ResourceType type, Object name) {
+//        log.error("Unsupported resource type for orchestrator data.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//	}
+//
+//	@Override
+//	public List<Resource> get(ResourceType type) {
+//        log.error("Unsupported resource type for orchestrator data.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//	}
+//
+//	@Override
+//	public void save() {
+//		EntityManager em = ResourceUtils.getEntityManager();
+//		Orchestrator existingOrchestrator = em.find(Orchestrator.class,
+//				experimentID);
+//		em.close();
+//		em = ResourceUtils.getEntityManager();
+//		em.getTransaction().begin();
+//		Orchestrator orchestrator = new Orchestrator();
+//		orchestrator.setExperiment_ID(experimentID);
+//		orchestrator.setUserName(userName);
+//		orchestrator.setGfacEPR(gfacEPR);
+//		orchestrator.setState(state);
+//		orchestrator.setStatus(status);
+//		orchestrator.setApplicationName(applicationName);
+//		orchestrator.setJobRequest(jobRequest);
+//		orchestrator.setSubmittedTime(submittedTime);
+//		orchestrator.setStatusUpdateTime(statusUpdateTime);
+//		if (existingOrchestrator != null) {
+//			existingOrchestrator.setExperiment_ID(experimentID);
+//			existingOrchestrator.setUserName(userName);
+//			existingOrchestrator.setState(state);
+//			existingOrchestrator.setStatus(status);
+//			existingOrchestrator.setGfacEPR(gfacEPR);
+//			existingOrchestrator.setApplicationName(applicationName);
+//			existingOrchestrator.setJobRequest(jobRequest);
+//			existingOrchestrator.setSubmittedTime(submittedTime);
+//			existingOrchestrator.setStatusUpdateTime(statusUpdateTime);
+//			orchestrator = em.merge(existingOrchestrator);
+//		} else {
+//			em.persist(orchestrator);
+//		}
+//		em.getTransaction().commit();
+//		em.close();
+//	}
+//
+//}
\ No newline at end of file
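
The commented-out OrchestratorDataResource.save() above follows the find-then-merge-or-persist idiom used throughout these JPA registry resources: look the row up by its primary key, then either merge an updated copy or persist a new one inside a single transaction. A minimal sketch of that idiom, reusing the Orchestrator entity and ResourceUtils.getEntityManager() from the code above (the sketch's own class and method names are illustrative assumptions, not part of this commit):

    import javax.persistence.EntityManager;

    import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
    import org.apache.airavata.persistance.registry.jpa.model.Orchestrator;

    public final class OrchestratorUpsertSketch {

        // Mirrors the upsert pattern in the resource classes: find by primary key,
        // then merge (update) or persist (insert) within one transaction.
        public static void save(String experimentId, Orchestrator incoming) {
            EntityManager em = ResourceUtils.getEntityManager();
            Orchestrator existing = em.find(Orchestrator.class, experimentId);
            em.close();

            em = ResourceUtils.getEntityManager();
            em.getTransaction().begin();
            incoming.setExperiment_ID(experimentId);
            if (existing != null) {
                em.merge(incoming);    // row already present: update it
            } else {
                em.persist(incoming);  // no row yet: insert a new one
            }
            em.getTransaction().commit();
            em.close();
        }
    }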

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ProjectResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ProjectResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ProjectResource.java
index 1a22616..c8877fe 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ProjectResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ProjectResource.java
@@ -64,16 +64,16 @@ public class ProjectResource extends AbstractResource {
      * @return child resource
      */
     public Resource create(ResourceType type) {
-        if (type == ResourceType.EXPERIMENT_METADATA) {
-            ExperimentMetadataResource experimentResource = new ExperimentMetadataResource();
-            experimentResource.setGateway(getGateway());
-            experimentResource.setExecutionUser(getWorker().getUser());
-            experimentResource.setProject(this);
-            return experimentResource;
-        } else {
+//        if (type == ResourceType.EXPERIMENT_METADATA) {
+//            ExperimentMetadataResource experimentResource = new ExperimentMetadataResource();
+//            experimentResource.setGateway(getGateway());
+//            experimentResource.setExecutionUser(getWorker().getUser());
+//            experimentResource.setProject(this);
+//            return experimentResource;
+//        } else {
             logger.error("Unsupported resource type for project resource.", new IllegalArgumentException());
             throw new IllegalArgumentException("Unsupported resource type for project resource.");
-        }
+//        }
     }
 
     /**
@@ -104,22 +104,22 @@ public class ProjectResource extends AbstractResource {
      * @return child resource
      */
     public Resource get(ResourceType type, Object name) {
-        if (type == ResourceType.EXPERIMENT_METADATA) {
-            EntityManager em = ResourceUtils.getEntityManager();
-            em.getTransaction().begin();
-        	QueryGenerator generator = new QueryGenerator(EXPERIMENT_METADATA);
-        	generator.setParameter(ExperimentMetadataConstants.EXPERIMENT_ID, name);
-        	Query q = generator.selectQuery(em);
-            Experiment_Metadata experiment = (Experiment_Metadata) q.getSingleResult();
-            ExperimentMetadataResource experimentResource = (ExperimentMetadataResource)
-                    Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
-            em.getTransaction().commit();
-            em.close();
-            return experimentResource;
-        }else{
+//        if (type == ResourceType.EXPERIMENT_METADATA) {
+//            EntityManager em = ResourceUtils.getEntityManager();
+//            em.getTransaction().begin();
+//        	QueryGenerator generator = new QueryGenerator(EXPERIMENT_METADATA);
+//        	generator.setParameter(ExperimentMetadataConstants.EXPERIMENT_ID, name);
+//        	Query q = generator.selectQuery(em);
+//            Experiment_Metadata experiment = (Experiment_Metadata) q.getSingleResult();
+//            ExperimentMetadataResource experimentResource = (ExperimentMetadataResource)
+//                    Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
+//            em.getTransaction().commit();
+//            em.close();
+//            return experimentResource;
+//        }else{
             logger.error("Unsupported resource type for project resource.", new IllegalArgumentException());
             throw new IllegalArgumentException("Unsupported resource type for project resource.");
-        }
+//        }
 
     }
 
@@ -157,28 +157,28 @@ public class ProjectResource extends AbstractResource {
     public List<Resource> get(ResourceType type) {
         List<Resource> resourceList = new ArrayList<Resource>();
 
-        if (type == ResourceType.EXPERIMENT_METADATA) {
-            EntityManager em = ResourceUtils.getEntityManager();
-            em.getTransaction().begin();
-        	QueryGenerator generator = new QueryGenerator(EXPERIMENT_METADATA);
-        	generator.setParameter(ExperimentMetadataConstants.PROJECT_NAME, name);
-        	Query q = generator.selectQuery(em);
-            List<?> results = q.getResultList();
-            if (results.size() != 0) {
-                for (Object result : results) {
-                    Experiment_Metadata experiment = (Experiment_Metadata) result;
-                    ExperimentMetadataResource experimentResource = (ExperimentMetadataResource)
-                            Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
-                    resourceList.add(experimentResource);
-                }
-            }
-            em.getTransaction().commit();
-            em.close();
-        } else {
+//        if (type == ResourceType.EXPERIMENT_METADATA) {
+//            EntityManager em = ResourceUtils.getEntityManager();
+//            em.getTransaction().begin();
+//        	QueryGenerator generator = new QueryGenerator(EXPERIMENT_METADATA);
+//        	generator.setParameter(ExperimentMetadataConstants.PROJECT_NAME, name);
+//        	Query q = generator.selectQuery(em);
+//            List<?> results = q.getResultList();
+//            if (results.size() != 0) {
+//                for (Object result : results) {
+//                    Experiment_Metadata experiment = (Experiment_Metadata) result;
+//                    ExperimentMetadataResource experimentResource = (ExperimentMetadataResource)
+//                            Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
+//                    resourceList.add(experimentResource);
+//                }
+//            }
+//            em.getTransaction().commit();
+//            em.close();
+//        } else {
             logger.error("Unsupported resource type for project resource.", new IllegalArgumentException());
             throw new IllegalArgumentException("Unsupported resource type for project resource.");
-        }
-        return resourceList;
+//        }
+//        return resourceList;
     }
 
     /**
@@ -273,33 +273,33 @@ public class ProjectResource extends AbstractResource {
      * @param experimentId experiment ID
      * @return  experiment resource
      */
-    public ExperimentMetadataResource createExperiment(String experimentId){
-		ExperimentMetadataResource experimentResource = (ExperimentMetadataResource)create(ResourceType.EXPERIMENT_METADATA);
-		experimentResource.setExpID(experimentId);
-		return experimentResource;
-	}
+//    public ExperimentMetadataResource createExperiment(String experimentId){
+//		ExperimentMetadataResource experimentResource = (ExperimentMetadataResource)create(ResourceType.EXPERIMENT_METADATA);
+//		experimentResource.setExpID(experimentId);
+//		return experimentResource;
+//	}
 
     /**
      *
      * @param experimentId experiment ID
      * @return experiment resource
      */
-	public ExperimentMetadataResource getExperiment(String experimentId){
-		return (ExperimentMetadataResource)get(ResourceType.EXPERIMENT_METADATA,experimentId);
-	}
+//	public ExperimentMetadataResource getExperiment(String experimentId){
+//		return (ExperimentMetadataResource)get(ResourceType.EXPERIMENT_METADATA,experimentId);
+//	}
 
     /**
      *
      * @return  list of experiments
      */
-    public List<ExperimentMetadataResource> getExperiments(){
-		List<Resource> list = get(ResourceType.EXPERIMENT_METADATA);
-		List<ExperimentMetadataResource> result=new ArrayList<ExperimentMetadataResource>();
-		for (Resource resource : list) {
-			result.add((ExperimentMetadataResource) resource);
-		}
-		return result;
-	}
+//    public List<ExperimentMetadataResource> getExperiments(){
+//		List<Resource> list = get(ResourceType.EXPERIMENT_METADATA);
+//		List<ExperimentMetadataResource> result=new ArrayList<ExperimentMetadataResource>();
+//		for (Resource resource : list) {
+//			result.add((ExperimentMetadataResource) resource);
+//		}
+//		return result;
+//	}
 
     /**
      *

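With the EXPERIMENT_METADATA branches commented out above, ProjectResource.create(...), get(..., name) and get(type) no longer handle any child resource type; every call now falls straight through to the error path and throws IllegalArgumentException. A hypothetical caller, included only to illustrate the post-patch behaviour (the helper class name is an assumption; ProjectResource and ResourceType are the types referenced in the diff):

    import org.apache.airavata.persistance.registry.jpa.ResourceType;
    import org.apache.airavata.persistance.registry.jpa.resources.ProjectResource;

    // Hypothetical check, not part of the commit: after this patch a request for
    // an EXPERIMENT_METADATA child ends in IllegalArgumentException, because only
    // the error branch of ProjectResource.create(...) is left uncommented.
    public final class ProjectResourcePostPatchDemo {
        public static boolean experimentCreationDisabled(ProjectResource project) {
            try {
                project.create(ResourceType.EXPERIMENT_METADATA);
                return false;                      // old code path would still be live
            } catch (IllegalArgumentException expected) {
                return true;                       // behaviour after this patch
            }
        }
    }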

[4/8] new datamodels for AIRAVATA-1017

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentMetadataResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentMetadataResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentMetadataResource.java
index b313c15..1178336 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentMetadataResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentMetadataResource.java
@@ -1,482 +1,482 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.*;
-import org.apache.airavata.persistance.registry.jpa.utils.QueryGenerator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.List;
-
-public class ExperimentMetadataResource extends AbstractResource {
-    private static final Logger logger = LoggerFactory.getLogger(ExperimentMetadataResource.class);
-    private String expID;
-    private String experimentName;
-    private String description;
-    private Timestamp submittedDate;
-    private String executionUser;
-    private GatewayResource gateway;
-    private ProjectResource project;
-    private boolean shareExp;
-
-    public static Logger getLogger() {
-        return logger;
-    }
-
-    public String getExpID() {
-        return expID;
-    }
-
-    public void setExpID(String expID) {
-        this.expID = expID;
-    }
-
-    public String getExperimentName() {
-        return experimentName;
-    }
-
-    public void setExperimentName(String experimentName) {
-        this.experimentName = experimentName;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    public void setDescription(String description) {
-        this.description = description;
-    }
-
-    public Timestamp getSubmittedDate() {
-        return submittedDate;
-    }
-
-    public void setSubmittedDate(Timestamp submittedDate) {
-        this.submittedDate = submittedDate;
-    }
-
-    public String getExecutionUser() {
-        return executionUser;
-    }
-
-    public void setExecutionUser(String executionUser) {
-        this.executionUser = executionUser;
-    }
-
-    public GatewayResource getGateway() {
-        return gateway;
-    }
-
-    public void setGateway(GatewayResource gateway) {
-        this.gateway = gateway;
-    }
-
-    public ProjectResource getProject() {
-        return project;
-    }
-
-    public void setProject(ProjectResource project) {
-        this.project = project;
-    }
-
-    public boolean isShareExp() {
-        return shareExp;
-    }
-
-    public void setShareExp(boolean shareExp) {
-        this.shareExp = shareExp;
-    }
-
-    public Resource create(ResourceType type) {
-        switch (type) {
-            case EXPERIMENT_CONFIG_DATA:
-                ExperimentConfigDataResource configDataResource = new ExperimentConfigDataResource();
-                configDataResource.setExMetadata(this);
-                return configDataResource;
-            case EXPERIMENT_SUMMARY:
-                ExperimentSummaryResource summaryResource = new ExperimentSummaryResource();
-                summaryResource.setExperimentMetadataResource(this);
-                return summaryResource;
-            case EXPERIMENT_INPUT:
-                ExperimentInputResource exInputResource = new ExperimentInputResource();
-                exInputResource.setExperimentMetadataResource(this);
-                return exInputResource;
-            case EXPERIMENT_OUTPUT:
-                ExperimentOutputResource exOutputResouce = new ExperimentOutputResource();
-                exOutputResouce.setExperimentMetadataResource(this);
-                return exOutputResouce;
-            case WORKFLOW_DATA:
-                WorkflowDataResource workflowDataResource = new WorkflowDataResource();
-                workflowDataResource.setExperimentID(expID);
-                return workflowDataResource;
-            case EXECUTION_ERROR:
-                ExecutionErrorResource executionErrorResource = new ExecutionErrorResource();
-                executionErrorResource.setMetadataResource(this);
-                return executionErrorResource;
-            case GFAC_JOB_DATA:
-                GFacJobDataResource gFacJobDataResource = new GFacJobDataResource();
-                gFacJobDataResource.setMetadataResource(this);
-                return gFacJobDataResource;
-            default:
-                logger.error("Unsupported resource type for experiment metadata resource.", new IllegalArgumentException());
-                throw new IllegalArgumentException("Unsupported resource type for gateway resource.");
-            }
-    }
-
-    public void remove(ResourceType type, Object name) {
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Query q;
-        QueryGenerator generator;
-        switch (type){
-            case EXPERIMENT_CONFIG_DATA:
-                generator = new QueryGenerator(EXPERIMENT_CONFIG_DATA);
-                generator.setParameter(ExperimentConfigurationDataConstants.EXPERIMENT_ID, name);
-                q = generator.deleteQuery(em);
-                q.executeUpdate();
-                break;
-            case EXPERIMENT_SUMMARY:
-                generator = new QueryGenerator(EXPERIMENT_SUMMARY);
-                generator.setParameter(ExperimentSummaryConstants.EXPERIMENT_ID, name);
-                q = generator.deleteQuery(em);
-                q.executeUpdate();
-                break;
-            case EXPERIMENT_INPUT:
-                generator = new QueryGenerator(EXPERIMENT_INPUT);
-                generator.setParameter(ExperimentInputConstants.EXPERIMENT_ID, name);
-                q = generator.deleteQuery(em);
-                q.executeUpdate();
-                break;
-            case EXPERIMENT_OUTPUT:
-                generator = new QueryGenerator(EXPERIMENT_OUTPUT);
-                generator.setParameter(ExperimentOutputConstants.EXPERIMENT_ID, name);
-                q = generator.deleteQuery(em);
-                q.executeUpdate();
-                break;
-            case WORKFLOW_DATA:
-                generator = new QueryGenerator(WORKFLOW_DATA);
-                generator.setParameter(WorkflowDataConstants.WORKFLOW_INSTANCE_ID, name);
-                q = generator.deleteQuery(em);
-                q.executeUpdate();
-                break;
-            case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
-                q = generator.deleteQuery(em);
-                q.executeUpdate();
-                break;
-            default:
-                logger.error("Unsupported operation for experiment metadata resource "
-                + "since there are no child resources generated by experiment metadata resource.. ",
-                new UnsupportedOperationException());
-                throw new UnsupportedOperationException();
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-
-    public Resource get(ResourceType type, Object name) {
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        QueryGenerator generator;
-        Query q;
-        switch (type) {
-            case EXPERIMENT_CONFIG_DATA:
-                generator = new QueryGenerator(EXPERIMENT_CONFIG_DATA);
-                generator.setParameter(ExperimentConfigurationDataConstants.EXPERIMENT_ID, name);
-                q = generator.selectQuery(em);
-                Experiment_Configuration_Data exConfigData = (Experiment_Configuration_Data) q.getSingleResult();
-                ExperimentConfigDataResource experimentConfigDataResource =
-                        (ExperimentConfigDataResource)Utils.getResource(ResourceType.EXPERIMENT_CONFIG_DATA, exConfigData);
-                em.getTransaction().commit();
-                em.close();
-                return experimentConfigDataResource;
-            case EXPERIMENT_SUMMARY:
-                generator = new QueryGenerator(EXPERIMENT_SUMMARY);
-                generator.setParameter(ExperimentSummaryConstants.EXPERIMENT_ID, name);
-                q = generator.selectQuery(em);
-                Experiment_Summary exSummaryData = (Experiment_Summary) q.getSingleResult();
-                ExperimentSummaryResource exSummary =
-                        (ExperimentSummaryResource)Utils.getResource(ResourceType.EXPERIMENT_SUMMARY, exSummaryData);
-                em.getTransaction().commit();
-                em.close();
-                return exSummary;
-            case EXPERIMENT_INPUT:
-                generator = new QueryGenerator(EXPERIMENT_INPUT);
-                generator.setParameter(ExperimentInputConstants.EXPERIMENT_ID, name);
-                q = generator.selectQuery(em);
-                Experiment_Input exInput = (Experiment_Input) q.getSingleResult();
-                ExperimentInputResource experimentInput =
-                        (ExperimentInputResource)Utils.getResource(ResourceType.EXPERIMENT_INPUT, exInput);
-                em.getTransaction().commit();
-                em.close();
-                return experimentInput;
-            case EXPERIMENT_OUTPUT:
-                generator = new QueryGenerator(EXPERIMENT_OUTPUT);
-                generator.setParameter(ExperimentOutputConstants.EXPERIMENT_ID, name);
-                q = generator.selectQuery(em);
-                Experiment_Output exOutput = (Experiment_Output) q.getSingleResult();
-                ExperimentOutputResource experimentOutput =
-                        (ExperimentOutputResource)Utils.getResource(ResourceType.EXPERIMENT_OUTPUT, exOutput);
-                em.getTransaction().commit();
-                em.close();
-                return experimentOutput;
-            case WORKFLOW_DATA:
-                generator = new QueryGenerator(WORKFLOW_DATA);
-//                generator.setParameter(WorkflowDataConstants.EXPERIMENT_ID, experimentID);
-                generator.setParameter(WorkflowDataConstants.WORKFLOW_INSTANCE_ID, name);
-                q = generator.selectQuery(em);
-                Workflow_Data eworkflowData = (Workflow_Data)q.getSingleResult();
-                WorkflowDataResource workflowDataResource = (WorkflowDataResource)Utils.getResource(ResourceType.WORKFLOW_DATA, eworkflowData);
-                em.getTransaction().commit();
-                em.close();
-                return workflowDataResource;
-            case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
-                q = generator.selectQuery(em);
-                GFac_Job_Data gFacJobData = (GFac_Job_Data)q.getSingleResult();
-                GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFacJobData);
-                em.getTransaction().commit();
-                em.close();
-                return gFacJobDataResource;
-            default:
-                logger.error("Unsupported operation for experiment metadata resource "
-                        + "since there are no child resources generated by experiment metadata resource.. ",
-                        new UnsupportedOperationException());
-                throw new UnsupportedOperationException();
-        }
-    }
-
-    public List<Resource> get(ResourceType type) {
-        List<Resource> resourceList = new ArrayList<Resource>();
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Query q;
-        QueryGenerator generator;
-        List results;
-        switch (type){
-            case EXPERIMENT_INPUT:
-                generator = new QueryGenerator(EXPERIMENT_INPUT);
-                generator.setParameter(ExperimentInputConstants.EXPERIMENT_ID, expID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Experiment_Input expInput = (Experiment_Input) result;
-                        ExperimentInputResource experimentResource =
-                                (ExperimentInputResource)Utils.getResource(ResourceType.EXPERIMENT_INPUT, expInput);
-                        resourceList.add(experimentResource);
-                    }
-                }
-                break;
-            case EXPERIMENT_OUTPUT:
-                generator = new QueryGenerator(EXPERIMENT_OUTPUT);
-                generator.setParameter(ExperimentOutputConstants.EXPERIMENT_ID, expID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Experiment_Output expOutput = (Experiment_Output) result;
-                        ExperimentOutputResource experimentResource =
-                                (ExperimentOutputResource)Utils.getResource(ResourceType.EXPERIMENT_INPUT, expOutput);
-                        resourceList.add(experimentResource);
-                    }
-                }
-                break;
-            case WORKFLOW_DATA:
-                generator = new QueryGenerator(WORKFLOW_DATA);
-//                generator.setParameter(WorkflowDataConstants.EXPERIMENT_ID, experimentID);
-                Experiment_Metadata experiment_metadata = em.find(Experiment_Metadata.class, expID);
-                generator.setParameter("experiment_metadata", experiment_metadata);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Workflow_Data workflowData = (Workflow_Data) result;
-                        WorkflowDataResource workflowDataResource = (WorkflowDataResource)Utils.getResource(ResourceType.WORKFLOW_DATA, workflowData);
-                        resourceList.add(workflowDataResource);
-                    }
-                }
-                break;
-            case EXECUTION_ERROR:
-                generator = new QueryGenerator(EXECUTION_ERROR);
-                generator.setParameter(ExecutionErrorConstants.EXPERIMENT_ID, expID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Execution_Error executionError = (Execution_Error)result;
-                        ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, executionError);
-                        resourceList.add(executionErrorResource);
-                    }
-                }
-                break;
-            case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.EXPERIMENT_ID, expID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        GFac_Job_Data gFacJobData = (GFac_Job_Data)result;
-                        GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFacJobData);
-                        resourceList.add(gFacJobDataResource);
-                    }
-                }
-                break;
-            default:
-                logger.error("Unsupported operation for experiment metadata resource "
-                        + "since there are no child resources generated by experiment metadata resource.. ",
-                        new UnsupportedOperationException());
-                throw new UnsupportedOperationException();
-        }
-        em.getTransaction().commit();
-        em.close();
-        return resourceList;
-    }
-
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        Experiment_Metadata existingExpMetaData = em.find(Experiment_Metadata.class, expID);
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Experiment_Metadata experimentMetadata = new Experiment_Metadata();
-        experimentMetadata.setExperiment_id(expID);
-        experimentMetadata.setExperiment_name(experimentName);
-        experimentMetadata.setShare_experiment(shareExp);
-        experimentMetadata.setDescription(description);
-        Gateway gatewayModel = em.find(Gateway.class, gateway.getGatewayName());
-        experimentMetadata.setGateway(gatewayModel);
-        Project projectModel = em.find(Project.class, project.getName());
-        experimentMetadata.setProject(projectModel);
-        experimentMetadata.setExecution_user(executionUser);
-        experimentMetadata.setSubmitted_date(submittedDate);
-        if (existingExpMetaData != null) {
-            existingExpMetaData.setExperiment_id(expID);
-            existingExpMetaData.setDescription(description);
-            existingExpMetaData.setExperiment_name(experimentName);
-            existingExpMetaData.setGateway(gatewayModel);
-            existingExpMetaData.setExecution_user(executionUser);
-            existingExpMetaData.setProject(projectModel);
-            existingExpMetaData.setShare_experiment(shareExp);
-            existingExpMetaData.setSubmitted_date(submittedDate);
-            experimentMetadata = em.merge(existingExpMetaData);
-        } else {
-            em.persist(experimentMetadata);
-        }
-        em.getTransaction().commit();
-        em.close();
-
-    }
-
-    public boolean isWorkflowInstancePresent(String workflowInstanceId){
-        return isExists(ResourceType.WORKFLOW_DATA, workflowInstanceId);
-    }
-
-    public boolean isGFacJobPresent(String jobId){
-        return isExists(ResourceType.GFAC_JOB_DATA, jobId);
-    }
-
-    public WorkflowDataResource getWorkflowInstance(String workflowInstanceId){
-        return (WorkflowDataResource)get(ResourceType.WORKFLOW_DATA, workflowInstanceId);
-    }
-
-    public List<Resource> getGFacJobs(){
-        return get(ResourceType.GFAC_JOB_DATA);
-    }
-
-    public List<WorkflowDataResource> getWorkflowInstances(){
-        return getResourceList(get(ResourceType.WORKFLOW_DATA),WorkflowDataResource.class);
-    }
-
-    public WorkflowDataResource createWorkflowInstanceResource(String workflowInstanceID){
-        WorkflowDataResource r=(WorkflowDataResource)create(ResourceType.WORKFLOW_DATA);
-        r.setWorkflowInstanceID(workflowInstanceID);
-        return r;
-    }
-
-    public GFacJobDataResource createGFacJob(String jobID){
-        GFacJobDataResource r=(GFacJobDataResource)create(ResourceType.GFAC_JOB_DATA);
-        r.setLocalJobID(jobID);
-        return r;
-    }
-
-    public ExperimentMetadataResource createExperimentMetadata(){
-        return (ExperimentMetadataResource)create(ResourceType.EXPERIMENT_METADATA);
-    }
-
-    public ExecutionErrorResource createExecutionError(){
-        return (ExecutionErrorResource) create(ResourceType.EXECUTION_ERROR);
-    }
-
-    public void removeWorkflowInstance(String workflowInstanceId){
-        remove(ResourceType.WORKFLOW_DATA, workflowInstanceId);
-    }
-
-    public List<ExecutionErrorResource> getExecutionErrors(String type, String experimentId, String workflowInstanceId, String nodeId, String gfacJobId){
-        List<ExecutionErrorResource> resourceList = new ArrayList<ExecutionErrorResource>();
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Query q;
-        QueryGenerator generator;
-        List<?> results;
-        generator = new QueryGenerator(EXECUTION_ERROR);
-        if (experimentId!=null){
-            generator.setParameter(ExecutionErrorConstants.EXPERIMENT_ID, experimentId);
-        }
-        if (type!=null){
-            generator.setParameter(ExecutionErrorConstants.SOURCE_TYPE, type);
-        }
-        if (workflowInstanceId!=null){
-            generator.setParameter(ExecutionErrorConstants.WORKFLOW_ID, workflowInstanceId);
-        }
-        if (nodeId!=null){
-            generator.setParameter(ExecutionErrorConstants.NODE_ID, nodeId);
-        }
-        if (gfacJobId!=null){
-            generator.setParameter(ExecutionErrorConstants.GFAC_JOB_ID, gfacJobId);
-        }
-        q = generator.selectQuery(em);
-        results = q.getResultList();
-        if (results.size() != 0) {
-            for (Object result : results) {
-                Execution_Error executionError = (Execution_Error)result;
-                ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, executionError);
-                resourceList.add(executionErrorResource);
-            }
-        }
-        em.getTransaction().commit();
-        em.close();
-        return resourceList;
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.*;
+//import org.apache.airavata.persistance.registry.jpa.utils.QueryGenerator;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import javax.persistence.Query;
+//import java.sql.Timestamp;
+//import java.util.ArrayList;
+//import java.util.List;
+//
+//public class ExperimentMetadataResource extends AbstractResource {
+//    private static final Logger logger = LoggerFactory.getLogger(ExperimentMetadataResource.class);
+//    private String expID;
+//    private String experimentName;
+//    private String description;
+//    private Timestamp submittedDate;
+//    private String executionUser;
+//    private GatewayResource gateway;
+//    private ProjectResource project;
+//    private boolean shareExp;
+//
+//    public static Logger getLogger() {
+//        return logger;
+//    }
+//
+//    public String getExpID() {
+//        return expID;
+//    }
+//
+//    public void setExpID(String expID) {
+//        this.expID = expID;
+//    }
+//
+//    public String getExperimentName() {
+//        return experimentName;
+//    }
+//
+//    public void setExperimentName(String experimentName) {
+//        this.experimentName = experimentName;
+//    }
+//
+//    public String getDescription() {
+//        return description;
+//    }
+//
+//    public void setDescription(String description) {
+//        this.description = description;
+//    }
+//
+//    public Timestamp getSubmittedDate() {
+//        return submittedDate;
+//    }
+//
+//    public void setSubmittedDate(Timestamp submittedDate) {
+//        this.submittedDate = submittedDate;
+//    }
+//
+//    public String getExecutionUser() {
+//        return executionUser;
+//    }
+//
+//    public void setExecutionUser(String executionUser) {
+//        this.executionUser = executionUser;
+//    }
+//
+//    public GatewayResource getGateway() {
+//        return gateway;
+//    }
+//
+//    public void setGateway(GatewayResource gateway) {
+//        this.gateway = gateway;
+//    }
+//
+//    public ProjectResource getProject() {
+//        return project;
+//    }
+//
+//    public void setProject(ProjectResource project) {
+//        this.project = project;
+//    }
+//
+//    public boolean isShareExp() {
+//        return shareExp;
+//    }
+//
+//    public void setShareExp(boolean shareExp) {
+//        this.shareExp = shareExp;
+//    }
+//
+//    public Resource create(ResourceType type) {
+//        switch (type) {
+//            case EXPERIMENT_CONFIG_DATA:
+//                ExperimentConfigDataResource configDataResource = new ExperimentConfigDataResource();
+//                configDataResource.setExMetadata(this);
+//                return configDataResource;
+//            case EXPERIMENT_SUMMARY:
+//                ExperimentSummaryResource summaryResource = new ExperimentSummaryResource();
+//                summaryResource.setExperimentMetadataResource(this);
+//                return summaryResource;
+//            case EXPERIMENT_INPUT:
+//                ExperimentInputResource exInputResource = new ExperimentInputResource();
+//                exInputResource.setExperimentMetadataResource(this);
+//                return exInputResource;
+//            case EXPERIMENT_OUTPUT:
+//                ExperimentOutputResource exOutputResouce = new ExperimentOutputResource();
+//                exOutputResouce.setExperimentMetadataResource(this);
+//                return exOutputResouce;
+//            case WORKFLOW_DATA:
+//                WorkflowDataResource workflowDataResource = new WorkflowDataResource();
+//                workflowDataResource.setExperimentID(expID);
+//                return workflowDataResource;
+//            case EXECUTION_ERROR:
+//                ExecutionErrorResource executionErrorResource = new ExecutionErrorResource();
+//                executionErrorResource.setMetadataResource(this);
+//                return executionErrorResource;
+//            case GFAC_JOB_DATA:
+//                GFacJobDataResource gFacJobDataResource = new GFacJobDataResource();
+//                gFacJobDataResource.setMetadataResource(this);
+//                return gFacJobDataResource;
+//            default:
+//                logger.error("Unsupported resource type for experiment metadata resource.", new IllegalArgumentException());
+//                throw new IllegalArgumentException("Unsupported resource type for gateway resource.");
+//            }
+//    }
+//
+//    public void remove(ResourceType type, Object name) {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Query q;
+//        QueryGenerator generator;
+//        switch (type){
+//            case EXPERIMENT_CONFIG_DATA:
+//                generator = new QueryGenerator(EXPERIMENT_CONFIG_DATA);
+//                generator.setParameter(ExperimentConfigurationDataConstants.EXPERIMENT_ID, name);
+//                q = generator.deleteQuery(em);
+//                q.executeUpdate();
+//                break;
+//            case EXPERIMENT_SUMMARY:
+//                generator = new QueryGenerator(EXPERIMENT_SUMMARY);
+//                generator.setParameter(ExperimentSummaryConstants.EXPERIMENT_ID, name);
+//                q = generator.deleteQuery(em);
+//                q.executeUpdate();
+//                break;
+//            case EXPERIMENT_INPUT:
+//                generator = new QueryGenerator(EXPERIMENT_INPUT);
+//                generator.setParameter(ExperimentInputConstants.EXPERIMENT_ID, name);
+//                q = generator.deleteQuery(em);
+//                q.executeUpdate();
+//                break;
+//            case EXPERIMENT_OUTPUT:
+//                generator = new QueryGenerator(EXPERIMENT_OUTPUT);
+//                generator.setParameter(ExperimentOutputConstants.EXPERIMENT_ID, name);
+//                q = generator.deleteQuery(em);
+//                q.executeUpdate();
+//                break;
+//            case WORKFLOW_DATA:
+//                generator = new QueryGenerator(WORKFLOW_DATA);
+//                generator.setParameter(WorkflowDataConstants.WORKFLOW_INSTANCE_ID, name);
+//                q = generator.deleteQuery(em);
+//                q.executeUpdate();
+//                break;
+//            case GFAC_JOB_DATA:
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
+//                q = generator.deleteQuery(em);
+//                q.executeUpdate();
+//                break;
+//            default:
+//                logger.error("Unsupported operation for experiment metadata resource "
+//                + "since there are no child resources generated by experiment metadata resource.. ",
+//                new UnsupportedOperationException());
+//                throw new UnsupportedOperationException();
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//
+//    public Resource get(ResourceType type, Object name) {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        QueryGenerator generator;
+//        Query q;
+//        switch (type) {
+//            case EXPERIMENT_CONFIG_DATA:
+//                generator = new QueryGenerator(EXPERIMENT_CONFIG_DATA);
+//                generator.setParameter(ExperimentConfigurationDataConstants.EXPERIMENT_ID, name);
+//                q = generator.selectQuery(em);
+//                Experiment_Configuration_Data exConfigData = (Experiment_Configuration_Data) q.getSingleResult();
+//                ExperimentConfigDataResource experimentConfigDataResource =
+//                        (ExperimentConfigDataResource)Utils.getResource(ResourceType.EXPERIMENT_CONFIG_DATA, exConfigData);
+//                em.getTransaction().commit();
+//                em.close();
+//                return experimentConfigDataResource;
+//            case EXPERIMENT_SUMMARY:
+//                generator = new QueryGenerator(EXPERIMENT_SUMMARY);
+//                generator.setParameter(ExperimentSummaryConstants.EXPERIMENT_ID, name);
+//                q = generator.selectQuery(em);
+//                Experiment_Summary exSummaryData = (Experiment_Summary) q.getSingleResult();
+//                ExperimentSummaryResource exSummary =
+//                        (ExperimentSummaryResource)Utils.getResource(ResourceType.EXPERIMENT_SUMMARY, exSummaryData);
+//                em.getTransaction().commit();
+//                em.close();
+//                return exSummary;
+//            case EXPERIMENT_INPUT:
+//                generator = new QueryGenerator(EXPERIMENT_INPUT);
+//                generator.setParameter(ExperimentInputConstants.EXPERIMENT_ID, name);
+//                q = generator.selectQuery(em);
+//                Experiment_Input exInput = (Experiment_Input) q.getSingleResult();
+//                ExperimentInputResource experimentInput =
+//                        (ExperimentInputResource)Utils.getResource(ResourceType.EXPERIMENT_INPUT, exInput);
+//                em.getTransaction().commit();
+//                em.close();
+//                return experimentInput;
+//            case EXPERIMENT_OUTPUT:
+//                generator = new QueryGenerator(EXPERIMENT_OUTPUT);
+//                generator.setParameter(ExperimentOutputConstants.EXPERIMENT_ID, name);
+//                q = generator.selectQuery(em);
+//                Experiment_Output exOutput = (Experiment_Output) q.getSingleResult();
+//                ExperimentOutputResource experimentOutput =
+//                        (ExperimentOutputResource)Utils.getResource(ResourceType.EXPERIMENT_OUTPUT, exOutput);
+//                em.getTransaction().commit();
+//                em.close();
+//                return experimentOutput;
+//            case WORKFLOW_DATA:
+//                generator = new QueryGenerator(WORKFLOW_DATA);
+////                generator.setParameter(WorkflowDataConstants.EXPERIMENT_ID, experimentID);
+//                generator.setParameter(WorkflowDataConstants.WORKFLOW_INSTANCE_ID, name);
+//                q = generator.selectQuery(em);
+//                Workflow_Data eworkflowData = (Workflow_Data)q.getSingleResult();
+//                WorkflowDataResource workflowDataResource = (WorkflowDataResource)Utils.getResource(ResourceType.WORKFLOW_DATA, eworkflowData);
+//                em.getTransaction().commit();
+//                em.close();
+//                return workflowDataResource;
+//            case GFAC_JOB_DATA:
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
+//                q = generator.selectQuery(em);
+//                GFac_Job_Data gFacJobData = (GFac_Job_Data)q.getSingleResult();
+//                GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFacJobData);
+//                em.getTransaction().commit();
+//                em.close();
+//                return gFacJobDataResource;
+//            default:
+//                logger.error("Unsupported operation for experiment metadata resource "
+//                        + "since there are no child resources generated by experiment metadata resource.. ",
+//                        new UnsupportedOperationException());
+//                throw new UnsupportedOperationException();
+//        }
+//    }
+//
+//    public List<Resource> get(ResourceType type) {
+//        List<Resource> resourceList = new ArrayList<Resource>();
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Query q;
+//        QueryGenerator generator;
+//        List results;
+//        switch (type){
+//            case EXPERIMENT_INPUT:
+//                generator = new QueryGenerator(EXPERIMENT_INPUT);
+//                generator.setParameter(ExperimentInputConstants.EXPERIMENT_ID, expID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Experiment_Input expInput = (Experiment_Input) result;
+//                        ExperimentInputResource experimentResource =
+//                                (ExperimentInputResource)Utils.getResource(ResourceType.EXPERIMENT_INPUT, expInput);
+//                        resourceList.add(experimentResource);
+//                    }
+//                }
+//                break;
+//            case EXPERIMENT_OUTPUT:
+//                generator = new QueryGenerator(EXPERIMENT_OUTPUT);
+//                generator.setParameter(ExperimentOutputConstants.EXPERIMENT_ID, expID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Experiment_Output expOutput = (Experiment_Output) result;
+//                        ExperimentOutputResource experimentResource =
+//                                (ExperimentOutputResource)Utils.getResource(ResourceType.EXPERIMENT_OUTPUT, expOutput);
+//                        resourceList.add(experimentResource);
+//                    }
+//                }
+//                break;
+//            case WORKFLOW_DATA:
+//                generator = new QueryGenerator(WORKFLOW_DATA);
+////                generator.setParameter(WorkflowDataConstants.EXPERIMENT_ID, experimentID);
+//                Experiment_Metadata experiment_metadata = em.find(Experiment_Metadata.class, expID);
+//                generator.setParameter("experiment_metadata", experiment_metadata);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Workflow_Data workflowData = (Workflow_Data) result;
+//                        WorkflowDataResource workflowDataResource = (WorkflowDataResource)Utils.getResource(ResourceType.WORKFLOW_DATA, workflowData);
+//                        resourceList.add(workflowDataResource);
+//                    }
+//                }
+//                break;
+//            case EXECUTION_ERROR:
+//                generator = new QueryGenerator(EXECUTION_ERROR);
+//                generator.setParameter(ExecutionErrorConstants.EXPERIMENT_ID, expID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Execution_Error executionError = (Execution_Error)result;
+//                        ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, executionError);
+//                        resourceList.add(executionErrorResource);
+//                    }
+//                }
+//                break;
+//            case GFAC_JOB_DATA:
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.EXPERIMENT_ID, expID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        GFac_Job_Data gFacJobData = (GFac_Job_Data)result;
+//                        GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFacJobData);
+//                        resourceList.add(gFacJobDataResource);
+//                    }
+//                }
+//                break;
+//            default:
+//                logger.error("Unsupported operation for experiment metadata resource "
+//                        + "since there are no child resources generated by experiment metadata resource.. ",
+//                        new UnsupportedOperationException());
+//                throw new UnsupportedOperationException();
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//        return resourceList;
+//    }
+//
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        Experiment_Metadata existingExpMetaData = em.find(Experiment_Metadata.class, expID);
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Experiment_Metadata experimentMetadata = new Experiment_Metadata();
+//        experimentMetadata.setExperiment_id(expID);
+//        experimentMetadata.setExperiment_name(experimentName);
+//        experimentMetadata.setShare_experiment(shareExp);
+//        experimentMetadata.setDescription(description);
+//        Gateway gatewayModel = em.find(Gateway.class, gateway.getGatewayName());
+//        experimentMetadata.setGateway(gatewayModel);
+//        Project projectModel = em.find(Project.class, project.getName());
+//        experimentMetadata.setProject(projectModel);
+//        experimentMetadata.setExecution_user(executionUser);
+//        experimentMetadata.setSubmitted_date(submittedDate);
+//        if (existingExpMetaData != null) {
+//            existingExpMetaData.setExperiment_id(expID);
+//            existingExpMetaData.setDescription(description);
+//            existingExpMetaData.setExperiment_name(experimentName);
+//            existingExpMetaData.setGateway(gatewayModel);
+//            existingExpMetaData.setExecution_user(executionUser);
+//            existingExpMetaData.setProject(projectModel);
+//            existingExpMetaData.setShare_experiment(shareExp);
+//            existingExpMetaData.setSubmitted_date(submittedDate);
+//            experimentMetadata = em.merge(existingExpMetaData);
+//        } else {
+//            em.persist(experimentMetadata);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//
+//    }
+//
+//    public boolean isWorkflowInstancePresent(String workflowInstanceId){
+//        return isExists(ResourceType.WORKFLOW_DATA, workflowInstanceId);
+//    }
+//
+//    public boolean isGFacJobPresent(String jobId){
+//        return isExists(ResourceType.GFAC_JOB_DATA, jobId);
+//    }
+//
+//    public WorkflowDataResource getWorkflowInstance(String workflowInstanceId){
+//        return (WorkflowDataResource)get(ResourceType.WORKFLOW_DATA, workflowInstanceId);
+//    }
+//
+//    public List<Resource> getGFacJobs(){
+//        return get(ResourceType.GFAC_JOB_DATA);
+//    }
+//
+//    public List<WorkflowDataResource> getWorkflowInstances(){
+//        return getResourceList(get(ResourceType.WORKFLOW_DATA),WorkflowDataResource.class);
+//    }
+//
+//    public WorkflowDataResource createWorkflowInstanceResource(String workflowInstanceID){
+//        WorkflowDataResource r=(WorkflowDataResource)create(ResourceType.WORKFLOW_DATA);
+//        r.setWorkflowInstanceID(workflowInstanceID);
+//        return r;
+//    }
+//
+//    public GFacJobDataResource createGFacJob(String jobID){
+//        GFacJobDataResource r=(GFacJobDataResource)create(ResourceType.GFAC_JOB_DATA);
+//        r.setLocalJobID(jobID);
+//        return r;
+//    }
+//
+//    public ExperimentMetadataResource createExperimentMetadata(){
+//        return (ExperimentMetadataResource)create(ResourceType.EXPERIMENT_METADATA);
+//    }
+//
+//    public ExecutionErrorResource createExecutionError(){
+//        return (ExecutionErrorResource) create(ResourceType.EXECUTION_ERROR);
+//    }
+//
+//    public void removeWorkflowInstance(String workflowInstanceId){
+//        remove(ResourceType.WORKFLOW_DATA, workflowInstanceId);
+//    }
+//
+//    public List<ExecutionErrorResource> getExecutionErrors(String type, String experimentId, String workflowInstanceId, String nodeId, String gfacJobId){
+//        List<ExecutionErrorResource> resourceList = new ArrayList<ExecutionErrorResource>();
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Query q;
+//        QueryGenerator generator;
+//        List<?> results;
+//        generator = new QueryGenerator(EXECUTION_ERROR);
+//        if (experimentId!=null){
+//            generator.setParameter(ExecutionErrorConstants.EXPERIMENT_ID, experimentId);
+//        }
+//        if (type!=null){
+//            generator.setParameter(ExecutionErrorConstants.SOURCE_TYPE, type);
+//        }
+//        if (workflowInstanceId!=null){
+//            generator.setParameter(ExecutionErrorConstants.WORKFLOW_ID, workflowInstanceId);
+//        }
+//        if (nodeId!=null){
+//            generator.setParameter(ExecutionErrorConstants.NODE_ID, nodeId);
+//        }
+//        if (gfacJobId!=null){
+//            generator.setParameter(ExecutionErrorConstants.GFAC_JOB_ID, gfacJobId);
+//        }
+//        q = generator.selectQuery(em);
+//        results = q.getResultList();
+//        if (results.size() != 0) {
+//            for (Object result : results) {
+//                Execution_Error executionError = (Execution_Error)result;
+//                ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, executionError);
+//                resourceList.add(executionErrorResource);
+//            }
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//        return resourceList;
+//    }
+//}
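
For reference, every save() in the resource classes above (both the removed originals and the new commented-out versions) follows the same plain-JPA upsert idiom: look the row up by its primary key, then either merge the updated state or persist a fresh entity. The minimal sketch below shows that idiom in isolation; the ExperimentNote entity, its fields, and the "registry-sketch" persistence unit name are illustrative placeholders, not names taken from this patch. The Airavata code additionally performs the lookup in a separate EntityManager before opening the one used for the write; a single manager is used here to keep the sketch short.

    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Id;
    import javax.persistence.Persistence;

    // Hypothetical entity used only to illustrate the find/merge-or-persist idiom.
    @Entity
    class ExperimentNote {
        @Id String noteId;
        String text;

        ExperimentNote() {}
        ExperimentNote(String noteId, String text) { this.noteId = noteId; this.text = text; }
    }

    public class UpsertSketch {
        public static void main(String[] args) {
            // "registry-sketch" is a placeholder persistence unit name.
            EntityManagerFactory emf = Persistence.createEntityManagerFactory("registry-sketch");
            EntityManager em = emf.createEntityManager();
            em.getTransaction().begin();

            // Same shape as the resource save() methods: find, then merge or persist.
            ExperimentNote existing = em.find(ExperimentNote.class, "note-1");
            if (existing != null) {
                existing.text = "updated";
                em.merge(existing);
            } else {
                em.persist(new ExperimentNote("note-1", "created"));
            }

            em.getTransaction().commit();
            em.close();
            emf.close();
        }
    }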

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentOutputResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentOutputResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentOutputResource.java
index 6aa5fee..bcd70e8 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentOutputResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentOutputResource.java
@@ -1,113 +1,113 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Output;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Output_PK;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import java.util.List;
-
-public class ExperimentOutputResource extends AbstractResource {
-    private static final Logger logger = LoggerFactory.getLogger(ExperimentOutputResource.class);
-
-    private ExperimentMetadataResource experimentMetadataResource;
-    private String experimentKey;
-    private String value;
-
-    public ExperimentMetadataResource getExperimentMetadataResource() {
-        return experimentMetadataResource;
-    }
-
-    public void setExperimentMetadataResource(ExperimentMetadataResource experimentMetadataResource) {
-        this.experimentMetadataResource = experimentMetadataResource;
-    }
-
-    public String getExperimentKey() {
-        return experimentKey;
-    }
-
-    public void setExperimentKey(String experimentKey) {
-        this.experimentKey = experimentKey;
-    }
-
-    public String getValue() {
-        return value;
-    }
-
-    public void setValue(String value) {
-        this.value = value;
-    }
-
-    public Resource create(ResourceType type) {
-        logger.error("Unsupported resource type for experiment output data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public void remove(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for experiment output data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public Resource get(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for experiment output data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public List<Resource> get(ResourceType type) {
-        logger.error("Unsupported resource type for experiment output data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        Experiment_Output existingOutput = em.find(Experiment_Output.class, new Experiment_Output_PK(experimentMetadataResource.getExpID(), experimentKey));
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Experiment_Output exOutput = new Experiment_Output();
-        exOutput.setEx_key(experimentKey);
-        Experiment_Metadata metadata = em.find(Experiment_Metadata.class, experimentMetadataResource.getExpID());
-        exOutput.setExperiment_metadata(metadata);
-        exOutput.setExperiment_id(metadata.getExperiment_id());
-        exOutput.setValue(value);
-
-        if (existingOutput != null){
-            existingOutput.setEx_key(experimentKey);
-            existingOutput.setExperiment_metadata(metadata);
-            existingOutput.setValue(value);
-            existingOutput.setExperiment_id(metadata.getExperiment_id());
-            exOutput = em.merge(existingOutput);
-        }else {
-            em.persist(exOutput);
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Output;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Output_PK;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import java.util.List;
+//
+//public class ExperimentOutputResource extends AbstractResource {
+//    private static final Logger logger = LoggerFactory.getLogger(ExperimentOutputResource.class);
+//
+//    private ExperimentMetadataResource experimentMetadataResource;
+//    private String experimentKey;
+//    private String value;
+//
+//    public ExperimentMetadataResource getExperimentMetadataResource() {
+//        return experimentMetadataResource;
+//    }
+//
+//    public void setExperimentMetadataResource(ExperimentMetadataResource experimentMetadataResource) {
+//        this.experimentMetadataResource = experimentMetadataResource;
+//    }
+//
+//    public String getExperimentKey() {
+//        return experimentKey;
+//    }
+//
+//    public void setExperimentKey(String experimentKey) {
+//        this.experimentKey = experimentKey;
+//    }
+//
+//    public String getValue() {
+//        return value;
+//    }
+//
+//    public void setValue(String value) {
+//        this.value = value;
+//    }
+//
+//    public Resource create(ResourceType type) {
+//        logger.error("Unsupported resource type for experiment output data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void remove(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for experiment output data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public Resource get(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for experiment output data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public List<Resource> get(ResourceType type) {
+//        logger.error("Unsupported resource type for experiment output data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        Experiment_Output existingOutput = em.find(Experiment_Output.class, new Experiment_Output_PK(experimentMetadataResource.getExpID(), experimentKey));
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Experiment_Output exOutput = new Experiment_Output();
+//        exOutput.setEx_key(experimentKey);
+//        Experiment_Metadata metadata = em.find(Experiment_Metadata.class, experimentMetadataResource.getExpID());
+//        exOutput.setExperiment_metadata(metadata);
+//        exOutput.setExperiment_id(metadata.getExperiment_id());
+//        exOutput.setValue(value);
+//
+//        if (existingOutput != null){
+//            existingOutput.setEx_key(experimentKey);
+//            existingOutput.setExperiment_metadata(metadata);
+//            existingOutput.setValue(value);
+//            existingOutput.setExperiment_id(metadata.getExperiment_id());
+//            exOutput = em.merge(existingOutput);
+//        }else {
+//            em.persist(exOutput);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//}
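
The lookup in the replaced ExperimentOutputResource.save() above goes through a composite primary key, Experiment_Output_PK(experimentId, key). In JPA terms that corresponds to an @IdClass mapping; a rough sketch under hypothetical names (OutputPK / OutputRow, not the actual Airavata model classes) is:

    import java.io.Serializable;
    import java.util.Objects;
    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.Id;
    import javax.persistence.IdClass;

    // Hypothetical key class mirroring the (experimentId, key) pair used above.
    class OutputPK implements Serializable {
        private String experimentId;
        private String outputKey;

        public OutputPK() {}
        public OutputPK(String experimentId, String outputKey) {
            this.experimentId = experimentId;
            this.outputKey = outputKey;
        }

        @Override public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof OutputPK)) return false;
            OutputPK other = (OutputPK) o;
            return Objects.equals(experimentId, other.experimentId)
                    && Objects.equals(outputKey, other.outputKey);
        }
        @Override public int hashCode() { return Objects.hash(experimentId, outputKey); }
    }

    // Hypothetical entity; the real Experiment_Output model carries more columns.
    @Entity
    @IdClass(OutputPK.class)
    class OutputRow {
        @Id String experimentId;
        @Id String outputKey;
        String value;
    }

    class OutputLookup {
        // Equivalent of em.find(Experiment_Output.class, new Experiment_Output_PK(expId, key)).
        static OutputRow find(EntityManager em, String expId, String key) {
            return em.find(OutputRow.class, new OutputPK(expId, key));
        }
    }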

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentSummaryResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentSummaryResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentSummaryResource.java
index 6e841b9..08a2aed 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentSummaryResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentSummaryResource.java
@@ -1,112 +1,112 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Summary;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import java.sql.Timestamp;
-import java.util.List;
-
-public class ExperimentSummaryResource extends AbstractResource {
-    private static final Logger logger = LoggerFactory.getLogger(ExperimentSummaryResource.class);
-    private ExperimentMetadataResource experimentMetadataResource;
-    private String status;
-    private Timestamp lastUpdateTime;
-
-    public ExperimentMetadataResource getExperimentMetadataResource() {
-        return experimentMetadataResource;
-    }
-
-    public void setExperimentMetadataResource(ExperimentMetadataResource experimentMetadataResource) {
-        this.experimentMetadataResource = experimentMetadataResource;
-    }
-
-    public String getStatus() {
-        return status;
-    }
-
-    public void setStatus(String status) {
-        this.status = status;
-    }
-
-    public Timestamp getLastUpdateTime() {
-        return lastUpdateTime;
-    }
-
-    public void setLastUpdateTime(Timestamp lastUpdateTime) {
-        this.lastUpdateTime = lastUpdateTime;
-    }
-
-    public Resource create(ResourceType type) {
-        logger.error("Unsupported resource type for experiment summary data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public void remove(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for experiment summary data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public Resource get(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for experiment summary data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public List<Resource> get(ResourceType type) {
-        logger.error("Unsupported resource type for experiment summary data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        Experiment_Summary existingExSummary = em.find(Experiment_Summary.class, experimentMetadataResource.getExpID());
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Experiment_Summary exSummary = new Experiment_Summary();
-        exSummary.setLast_update_time(lastUpdateTime);
-        exSummary.setStatus(status);
-        Experiment_Metadata metadata = em.find(Experiment_Metadata.class, experimentMetadataResource.getExpID());
-        exSummary.setExperiment_metadata(metadata);
-        exSummary.setExperimentID(metadata.getExperiment_id());
-
-        if (existingExSummary != null){
-            existingExSummary.setLast_update_time(lastUpdateTime);
-            existingExSummary.setStatus(status);
-            existingExSummary.setExperiment_metadata(metadata);
-            existingExSummary.setExperimentID(metadata.getExperiment_id());
-            exSummary = em.merge(existingExSummary);
-        }  else {
-            em.persist(exSummary);
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Summary;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import java.sql.Timestamp;
+//import java.util.List;
+//
+//public class ExperimentSummaryResource extends AbstractResource {
+//    private static final Logger logger = LoggerFactory.getLogger(ExperimentSummaryResource.class);
+//    private ExperimentMetadataResource experimentMetadataResource;
+//    private String status;
+//    private Timestamp lastUpdateTime;
+//
+//    public ExperimentMetadataResource getExperimentMetadataResource() {
+//        return experimentMetadataResource;
+//    }
+//
+//    public void setExperimentMetadataResource(ExperimentMetadataResource experimentMetadataResource) {
+//        this.experimentMetadataResource = experimentMetadataResource;
+//    }
+//
+//    public String getStatus() {
+//        return status;
+//    }
+//
+//    public void setStatus(String status) {
+//        this.status = status;
+//    }
+//
+//    public Timestamp getLastUpdateTime() {
+//        return lastUpdateTime;
+//    }
+//
+//    public void setLastUpdateTime(Timestamp lastUpdateTime) {
+//        this.lastUpdateTime = lastUpdateTime;
+//    }
+//
+//    public Resource create(ResourceType type) {
+//        logger.error("Unsupported resource type for experiment summary data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void remove(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for experiment summary data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public Resource get(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for experiment summary data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public List<Resource> get(ResourceType type) {
+//        logger.error("Unsupported resource type for experiment summary data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        Experiment_Summary existingExSummary = em.find(Experiment_Summary.class, experimentMetadataResource.getExpID());
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Experiment_Summary exSummary = new Experiment_Summary();
+//        exSummary.setLast_update_time(lastUpdateTime);
+//        exSummary.setStatus(status);
+//        Experiment_Metadata metadata = em.find(Experiment_Metadata.class, experimentMetadataResource.getExpID());
+//        exSummary.setExperiment_metadata(metadata);
+//        exSummary.setExperimentID(metadata.getExperiment_id());
+//
+//        if (existingExSummary != null){
+//            existingExSummary.setLast_update_time(lastUpdateTime);
+//            existingExSummary.setStatus(status);
+//            existingExSummary.setExperiment_metadata(metadata);
+//            existingExSummary.setExperimentID(metadata.getExperiment_id());
+//            exSummary = em.merge(existingExSummary);
+//        }  else {
+//            em.persist(exSummary);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobDataResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobDataResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobDataResource.java
index 35ea727..16a476d 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobDataResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/GFacJobDataResource.java
@@ -1,257 +1,257 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.*;
-import org.apache.airavata.persistance.registry.jpa.utils.QueryGenerator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.List;
-
-public class GFacJobDataResource extends AbstractResource {
-    private final static Logger logger = LoggerFactory.getLogger(GFacJobDataResource.class);
-    private ExperimentMetadataResource metadataResource;
-    private WorkflowDataResource workflowDataResource;
-    private String nodeID;
-    private String applicationDescID;
-    private String hostDescID;
-    private String serviceDescID;
-    private String jobData;
-    private String localJobID;
-    private Timestamp submittedTime;
-    private Timestamp statusUpdateTime;
-    private String status;
-    private String metadata;
-
-    public ExperimentMetadataResource getMetadataResource() {
-        return metadataResource;
-    }
-
-    public WorkflowDataResource getWorkflowDataResource() {
-        return workflowDataResource;
-    }
-
-    public String getNodeID() {
-        return nodeID;
-    }
-
-    public String getApplicationDescID() {
-        return applicationDescID;
-    }
-
-    public String getHostDescID() {
-        return hostDescID;
-    }
-
-    public String getServiceDescID() {
-        return serviceDescID;
-    }
-
-    public String getJobData() {
-        return jobData;
-    }
-
-    public String getLocalJobID() {
-        return localJobID;
-    }
-
-    public Timestamp getSubmittedTime() {
-        return submittedTime;
-    }
-
-    public Timestamp getStatusUpdateTime() {
-        return statusUpdateTime;
-    }
-
-    public String getStatus() {
-        return status;
-    }
-
-    public String getMetadata() {
-        return metadata;
-    }
-
-    public void setMetadataResource(ExperimentMetadataResource metadataResource) {
-        this.metadataResource = metadataResource;
-    }
-
-    public void setWorkflowDataResource(WorkflowDataResource workflowDataResource) {
-        this.workflowDataResource = workflowDataResource;
-    }
-
-    public void setNodeID(String nodeID) {
-        this.nodeID = nodeID;
-    }
-
-    public void setApplicationDescID(String applicationDescID) {
-        this.applicationDescID = applicationDescID;
-    }
-
-    public void setHostDescID(String hostDescID) {
-        this.hostDescID = hostDescID;
-    }
-
-    public void setServiceDescID(String serviceDescID) {
-        this.serviceDescID = serviceDescID;
-    }
-
-    public void setJobData(String jobData) {
-        this.jobData = jobData;
-    }
-
-    public void setLocalJobID(String localJobID) {
-        this.localJobID = localJobID;
-    }
-
-    public void setSubmittedTime(Timestamp submittedTime) {
-        this.submittedTime = submittedTime;
-    }
-
-    public void setStatusUpdateTime(Timestamp statusUpdateTime) {
-        this.statusUpdateTime = statusUpdateTime;
-    }
-
-    public void setStatus(String status) {
-        this.status = status;
-    }
-
-    public void setMetadata(String metadata) {
-        this.metadata = metadata;
-    }
-
-    @Override
-    public Resource create(ResourceType type) {
-        switch (type){
-            case GFAC_JOB_STATUS:
-                GFacJobStatusResource gFacJobStatusResource = new GFacJobStatusResource();
-                gFacJobStatusResource.setLocalJobID(localJobID);
-                gFacJobStatusResource.setgFacJobDataResource(this);
-                return gFacJobStatusResource;
-            default:
-                logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
-                throw new UnsupportedOperationException();
-        }
-    }
-
-    @Override
-    public void remove(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for GFac Job data resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public Resource get(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for GFac Job data resource" ,new UnsupportedOperationException() );
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public List<Resource> get(ResourceType type) {
-        List<Resource> resourceList = new ArrayList<Resource>();
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Query q;
-        QueryGenerator generator;
-        List results;
-        switch (type){
-            case GFAC_JOB_STATUS:
-                generator = new QueryGenerator(GFAC_JOB_STATUS);
-                generator.setParameter(GFacJobStatusConstants.LOCAL_JOB_ID, localJobID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        GFac_Job_Status gFacJobStatus = (GFac_Job_Status) result;
-                        GFacJobStatusResource gFacJobStatusResource =
-                                (GFacJobStatusResource)Utils.getResource(ResourceType.GFAC_JOB_STATUS, gFacJobStatus);
-                        resourceList.add(gFacJobStatusResource);
-                    }
-                }
-                break;
-            default:
-                em.getTransaction().commit();
-                em.close();
-                logger.error("Unsupported resource type for gfac job data resource.", new IllegalArgumentException());
-                throw new IllegalArgumentException("Unsupported resource type for gfac job data resource.");
-        }
-        em.getTransaction().commit();
-        em.close();
-        return resourceList;
-    }
-
-    @Override
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        GFac_Job_Data existingGfacJobData = em.find(GFac_Job_Data.class, localJobID);
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        GFac_Job_Data gFacJobData = new GFac_Job_Data();
-        Experiment_Metadata experiment_data = em.find(Experiment_Metadata.class, metadataResource.getExpID());
-        gFacJobData.setExperiment_metadata(experiment_data);
-        gFacJobData.setExperiment_ID(metadataResource.getExpID());
-        Workflow_Data workflow_data = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
-        gFacJobData.setWorkflow_Data(workflow_data);
-        gFacJobData.setWorkflow_instanceID(workflowDataResource.getWorkflowInstanceID());
-        gFacJobData.setNode_id(nodeID);
-        gFacJobData.setApplication_descriptor_ID(applicationDescID);
-        gFacJobData.setLocal_Job_ID(localJobID);
-        gFacJobData.setService_descriptor_ID(serviceDescID);
-        gFacJobData.setHost_descriptor_ID(hostDescID);
-        gFacJobData.setJob_data(jobData);
-        gFacJobData.setSubmitted_time(submittedTime);
-        gFacJobData.setStatus_update_time(statusUpdateTime);
-        gFacJobData.setStatus(status);
-        gFacJobData.setMetadata(metadata);
-        if(existingGfacJobData != null){
-            existingGfacJobData.setExperiment_metadata(experiment_data);
-            existingGfacJobData.setExperiment_ID(metadataResource.getExpID());
-            Workflow_Data workflow_data1 = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
-            existingGfacJobData.setWorkflow_Data(workflow_data1);
-            existingGfacJobData.setWorkflow_instanceID(workflowDataResource.getWorkflowInstanceID());
-            existingGfacJobData.setNode_id(nodeID);
-            existingGfacJobData.setApplication_descriptor_ID(applicationDescID);
-            existingGfacJobData.setLocal_Job_ID(localJobID);
-            existingGfacJobData.setService_descriptor_ID(serviceDescID);
-            existingGfacJobData.setHost_descriptor_ID(hostDescID);
-            existingGfacJobData.setJob_data(jobData);
-            existingGfacJobData.setSubmitted_time(submittedTime);
-            existingGfacJobData.setStatus_update_time(statusUpdateTime);
-            existingGfacJobData.setStatus(status);
-            existingGfacJobData.setMetadata(metadata);
-            gFacJobData = em.merge(existingGfacJobData);
-        }  else {
-            em.persist(gFacJobData);
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.*;
+//import org.apache.airavata.persistance.registry.jpa.utils.QueryGenerator;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import javax.persistence.Query;
+//import java.sql.Timestamp;
+//import java.util.ArrayList;
+//import java.util.List;
+//
+//public class GFacJobDataResource extends AbstractResource {
+//    private final static Logger logger = LoggerFactory.getLogger(GFacJobDataResource.class);
+//    private ExperimentMetadataResource metadataResource;
+//    private WorkflowDataResource workflowDataResource;
+//    private String nodeID;
+//    private String applicationDescID;
+//    private String hostDescID;
+//    private String serviceDescID;
+//    private String jobData;
+//    private String localJobID;
+//    private Timestamp submittedTime;
+//    private Timestamp statusUpdateTime;
+//    private String status;
+//    private String metadata;
+//
+//    public ExperimentMetadataResource getMetadataResource() {
+//        return metadataResource;
+//    }
+//
+//    public WorkflowDataResource getWorkflowDataResource() {
+//        return workflowDataResource;
+//    }
+//
+//    public String getNodeID() {
+//        return nodeID;
+//    }
+//
+//    public String getApplicationDescID() {
+//        return applicationDescID;
+//    }
+//
+//    public String getHostDescID() {
+//        return hostDescID;
+//    }
+//
+//    public String getServiceDescID() {
+//        return serviceDescID;
+//    }
+//
+//    public String getJobData() {
+//        return jobData;
+//    }
+//
+//    public String getLocalJobID() {
+//        return localJobID;
+//    }
+//
+//    public Timestamp getSubmittedTime() {
+//        return submittedTime;
+//    }
+//
+//    public Timestamp getStatusUpdateTime() {
+//        return statusUpdateTime;
+//    }
+//
+//    public String getStatus() {
+//        return status;
+//    }
+//
+//    public String getMetadata() {
+//        return metadata;
+//    }
+//
+//    public void setMetadataResource(ExperimentMetadataResource metadataResource) {
+//        this.metadataResource = metadataResource;
+//    }
+//
+//    public void setWorkflowDataResource(WorkflowDataResource workflowDataResource) {
+//        this.workflowDataResource = workflowDataResource;
+//    }
+//
+//    public void setNodeID(String nodeID) {
+//        this.nodeID = nodeID;
+//    }
+//
+//    public void setApplicationDescID(String applicationDescID) {
+//        this.applicationDescID = applicationDescID;
+//    }
+//
+//    public void setHostDescID(String hostDescID) {
+//        this.hostDescID = hostDescID;
+//    }
+//
+//    public void setServiceDescID(String serviceDescID) {
+//        this.serviceDescID = serviceDescID;
+//    }
+//
+//    public void setJobData(String jobData) {
+//        this.jobData = jobData;
+//    }
+//
+//    public void setLocalJobID(String localJobID) {
+//        this.localJobID = localJobID;
+//    }
+//
+//    public void setSubmittedTime(Timestamp submittedTime) {
+//        this.submittedTime = submittedTime;
+//    }
+//
+//    public void setStatusUpdateTime(Timestamp statusUpdateTime) {
+//        this.statusUpdateTime = statusUpdateTime;
+//    }
+//
+//    public void setStatus(String status) {
+//        this.status = status;
+//    }
+//
+//    public void setMetadata(String metadata) {
+//        this.metadata = metadata;
+//    }
+//
+//    @Override
+//    public Resource create(ResourceType type) {
+//        switch (type){
+//            case GFAC_JOB_STATUS:
+//                GFacJobStatusResource gFacJobStatusResource = new GFacJobStatusResource();
+//                gFacJobStatusResource.setLocalJobID(localJobID);
+//                gFacJobStatusResource.setgFacJobDataResource(this);
+//                return gFacJobStatusResource;
+//            default:
+//                logger.error("Unsupported resource type for GFac Job status resource" ,new UnsupportedOperationException() );
+//                throw new UnsupportedOperationException();
+//        }
+//    }
+//
+//    @Override
+//    public void remove(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for GFac Job data resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public Resource get(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for GFac Job data resource" ,new UnsupportedOperationException() );
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public List<Resource> get(ResourceType type) {
+//        List<Resource> resourceList = new ArrayList<Resource>();
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Query q;
+//        QueryGenerator generator;
+//        List results;
+//        switch (type){
+//            case GFAC_JOB_STATUS:
+//                generator = new QueryGenerator(GFAC_JOB_STATUS);
+//                generator.setParameter(GFacJobStatusConstants.LOCAL_JOB_ID, localJobID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        GFac_Job_Status gFacJobStatus = (GFac_Job_Status) result;
+//                        GFacJobStatusResource gFacJobStatusResource =
+//                                (GFacJobStatusResource)Utils.getResource(ResourceType.GFAC_JOB_STATUS, gFacJobStatus);
+//                        resourceList.add(gFacJobStatusResource);
+//                    }
+//                }
+//                break;
+//            default:
+//                em.getTransaction().commit();
+//                em.close();
+//                logger.error("Unsupported resource type for gfac job data resource.", new IllegalArgumentException());
+//                throw new IllegalArgumentException("Unsupported resource type for gfac job data resource.");
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//        return resourceList;
+//    }
+//
+//    @Override
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        GFac_Job_Data existingGfacJobData = em.find(GFac_Job_Data.class, localJobID);
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        GFac_Job_Data gFacJobData = new GFac_Job_Data();
+//        Experiment_Metadata experiment_data = em.find(Experiment_Metadata.class, metadataResource.getExpID());
+//        gFacJobData.setExperiment_metadata(experiment_data);
+//        gFacJobData.setExperiment_ID(metadataResource.getExpID());
+//        Workflow_Data workflow_data = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
+//        gFacJobData.setWorkflow_Data(workflow_data);
+//        gFacJobData.setWorkflow_instanceID(workflowDataResource.getWorkflowInstanceID());
+//        gFacJobData.setNode_id(nodeID);
+//        gFacJobData.setApplication_descriptor_ID(applicationDescID);
+//        gFacJobData.setLocal_Job_ID(localJobID);
+//        gFacJobData.setService_descriptor_ID(serviceDescID);
+//        gFacJobData.setHost_descriptor_ID(hostDescID);
+//        gFacJobData.setJob_data(jobData);
+//        gFacJobData.setSubmitted_time(submittedTime);
+//        gFacJobData.setStatus_update_time(statusUpdateTime);
+//        gFacJobData.setStatus(status);
+//        gFacJobData.setMetadata(metadata);
+//        if(existingGfacJobData != null){
+//            existingGfacJobData.setExperiment_metadata(experiment_data);
+//            existingGfacJobData.setExperiment_ID(metadataResource.getExpID());
+//            Workflow_Data workflow_data1 = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
+//            existingGfacJobData.setWorkflow_Data(workflow_data1);
+//            existingGfacJobData.setWorkflow_instanceID(workflowDataResource.getWorkflowInstanceID());
+//            existingGfacJobData.setNode_id(nodeID);
+//            existingGfacJobData.setApplication_descriptor_ID(applicationDescID);
+//            existingGfacJobData.setLocal_Job_ID(localJobID);
+//            existingGfacJobData.setService_descriptor_ID(serviceDescID);
+//            existingGfacJobData.setHost_descriptor_ID(hostDescID);
+//            existingGfacJobData.setJob_data(jobData);
+//            existingGfacJobData.setSubmitted_time(submittedTime);
+//            existingGfacJobData.setStatus_update_time(statusUpdateTime);
+//            existingGfacJobData.setStatus(status);
+//            existingGfacJobData.setMetadata(metadata);
+//            gFacJobData = em.merge(existingGfacJobData);
+//        }  else {
+//            em.persist(gFacJobData);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//
+//}
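
The get(ResourceType) method in GFacJobDataResource above builds its select through the registry's internal QueryGenerator and then wraps each GFac_Job_Status row via Utils.getResource. Stripped of those helpers, the underlying JPA work is an ordinary parameterized query executed inside a transaction; a standalone sketch with hypothetical JobStatusRow / StatusResource stand-ins might look like:

    import java.util.ArrayList;
    import java.util.List;
    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.Id;
    import javax.persistence.TypedQuery;

    // Hypothetical stand-ins for the generated model class and its wrapper resource.
    @Entity
    class JobStatusRow {
        @Id Long statusId;
        String localJobId;
        String status;
    }

    class StatusResource {
        final JobStatusRow row;
        StatusResource(JobStatusRow row) { this.row = row; }
    }

    class JobStatusQuerySketch {
        // Roughly what get(ResourceType.GFAC_JOB_STATUS) does: select rows by
        // LOCAL_JOB_ID inside a transaction and wrap each result.
        static List<StatusResource> statusesFor(EntityManager em, String localJobId) {
            em.getTransaction().begin();
            TypedQuery<JobStatusRow> q = em.createQuery(
                    "SELECT s FROM JobStatusRow s WHERE s.localJobId = :jobId",
                    JobStatusRow.class);
            q.setParameter("jobId", localJobId);
            List<StatusResource> resources = new ArrayList<StatusResource>();
            for (JobStatusRow row : q.getResultList()) {
                resources.add(new StatusResource(row));
            }
            em.getTransaction().commit();
            return resources;
        }
    }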


[7/8] new datamodels for AIRAVATA-1017

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
index d51e549..11c215e 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
@@ -23,11 +23,12 @@ package org.apache.airavata.persistance.registry.jpa.impl;
 
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.model.experiment.*;
+import org.apache.airavata.model.experiment.BasicMetadata;
+import org.apache.airavata.model.experiment.ConfigurationData;
 import org.apache.airavata.persistance.registry.jpa.Resource;
 import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.resources.*;
-import org.apache.airavata.persistance.registry.jpa.utils.ThriftDataModelConversion;
+import org.apache.airavata.persistance.registry.jpa.resources.GatewayResource;
+import org.apache.airavata.persistance.registry.jpa.resources.WorkerResource;
 import org.apache.airavata.registry.cpi.utils.Constants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,6 +36,8 @@ import org.slf4j.LoggerFactory;
 import java.sql.Timestamp;
 import java.util.*;
 
+//import org.apache.airavata.persistance.registry.jpa.utils.ThriftDataModelConversion;
+
 public class ExperimentRegistry {
     private GatewayRegistry gatewayRegistry;
     private UserReg userReg;
@@ -48,15 +51,15 @@ public class ExperimentRegistry {
             userReg = new UserReg();
             WorkerResource worker = userReg.getSysteUser();
             experimentID = getExperimentID(basicMetadata.getExperimentName());
-            ExperimentMetadataResource exBasicData = gateway.createBasicMetada(experimentID);
-            exBasicData.setExperimentName(basicMetadata.getExperimentName());
-            exBasicData.setDescription(basicMetadata.getExperimentDescription());
-            exBasicData.setExecutionUser(basicMetadata.getUserName());
-            exBasicData.setSubmittedDate(getCurrentTimestamp());
-            exBasicData.setShareExp(basicMetadata.isSetShareExperimentPublicly());
-            ProjectResource projectResource = worker.getProject(basicMetadata.getProjectID());
-            exBasicData.setProject(projectResource);
-            exBasicData.save();
+//            ExperimentMetadataResource exBasicData = gateway.createBasicMetada(experimentID);
+//            exBasicData.setExperimentName(basicMetadata.getExperimentName());
+//            exBasicData.setDescription(basicMetadata.getExperimentDescription());
+//            exBasicData.setExecutionUser(basicMetadata.getUserName());
+//            exBasicData.setSubmittedDate(getCurrentTimestamp());
+//            exBasicData.setShareExp(basicMetadata.isSetShareExperimentPublicly());
+//            ProjectResource projectResource = worker.getProject(basicMetadata.getProjectID());
+//            exBasicData.setProject(projectResource);
+//            exBasicData.save();
         } catch (ApplicationSettingsException e) {
             logger.error("Unable to read airavata-server properties", e.getMessage());
         }
@@ -67,75 +70,75 @@ public class ExperimentRegistry {
         try {
             gatewayRegistry = new GatewayRegistry();
             GatewayResource gateway = gatewayRegistry.getDefaultGateway();
-            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, experimentID);
-            ExperimentConfigDataResource exConfigData = (ExperimentConfigDataResource) exBasicData.create(ResourceType.EXPERIMENT_CONFIG_DATA);
-            BasicMetadata updatedBasicMetadata = configurationData.getBasicMetadata();
-            if (updatedBasicMetadata != null) {
-                if (updatedBasicMetadata.getExperimentName() != null && !updatedBasicMetadata.getExperimentName().equals("")) {
-                    exBasicData.setExperimentName(updatedBasicMetadata.getExperimentName());
-                }
-                if (updatedBasicMetadata.getExperimentDescription() != null && !updatedBasicMetadata.getExperimentDescription().equals("")) {
-                    exBasicData.setDescription(updatedBasicMetadata.getExperimentDescription());
-                }
-                if (updatedBasicMetadata.getUserName() != null && !updatedBasicMetadata.getUserName().equals("")) {
-                    exBasicData.setExecutionUser(updatedBasicMetadata.getUserName());
-                }
-                exBasicData.setShareExp(updatedBasicMetadata.isSetShareExperimentPublicly());
-                exBasicData.save();
-            }
-            exConfigData.setExMetadata(exBasicData);
-            exConfigData.setApplicationID(configurationData.getApplicationId());
-            exConfigData.setApplicationVersion(configurationData.getApplicationVersion());
-            exConfigData.setWorkflowTemplateId(configurationData.getWorkflowTemplateId());
-            exConfigData.setWorkflowTemplateVersion(configurationData.getWorklfowTemplateVersion());
-
-            ComputationalResourceScheduling resourceScheduling = configurationData.getComputationalResourceScheduling();
-            if (resourceScheduling != null) {
-                exConfigData.setCpuCount(resourceScheduling.getTotalCPUCount());
-                exConfigData.setAiravataAutoSchedule(resourceScheduling.isAiravataAutoSchedule());
-                exConfigData.setOverrideManualSchedule(resourceScheduling.isOverrideManualScheduledParams());
-                exConfigData.setResourceHostID(resourceScheduling.getResourceHostId());
-                exConfigData.setNodeCount(resourceScheduling.getNodeCount());
-                exConfigData.setNumberOfThreads(resourceScheduling.getNumberOfThreads());
-                exConfigData.setQueueName(resourceScheduling.getQueueName());
-                exConfigData.setWallTimeLimit(resourceScheduling.getWallTimeLimit());
-                exConfigData.setJobStartTime(getTime(resourceScheduling.getJobStartTime()));
-                exConfigData.setPhysicalMemory(resourceScheduling.getTotalPhysicalMemory());
-                exConfigData.setProjectAccount(resourceScheduling.getComputationalProjectAccount());
-            }
-
-            AdvancedInputDataHandling inputDataHandling = configurationData.getAdvanceInputDataHandling();
-            if (inputDataHandling != null) {
-                exConfigData.setStageInputsToWDir(inputDataHandling.isStageInputFilesToWorkingDir());
-                exConfigData.setWorkingDirParent(inputDataHandling.getWorkingDirectoryParent());
-                exConfigData.setWorkingDir(inputDataHandling.getUniqueWorkingDirectory());
-                exConfigData.setCleanAfterJob(inputDataHandling.isCleanUpWorkingDirAfterJob());
-            }
-
-            AdvancedOutputDataHandling outputDataHandling = configurationData.getAdvanceOutputDataHandling();
-            if (outputDataHandling != null) {
-                exConfigData.setOutputDataDir(outputDataHandling.getOutputdataDir());
-                exConfigData.setDataRegURL(outputDataHandling.getDataRegistryURL());
-                exConfigData.setPersistOutputData(outputDataHandling.isPersistOutputData());
-            }
-
-            QualityOfServiceParams qosParams = configurationData.getQosParams();
-            if (qosParams != null) {
-                exConfigData.setStartExecutionAt(qosParams.getStartExecutionAt());
-                exConfigData.setExecuteBefore(qosParams.getExecuteBefore());
-                exConfigData.setNumberOfRetries(qosParams.getNumberofRetries());
-            }
-
-            Map<String, String> experimentInputs = configurationData.getExperimentInputs();
-            for (String inputKey : experimentInputs.keySet()) {
-                ExperimentInputResource exInputResource = (ExperimentInputResource) exBasicData.create(ResourceType.EXPERIMENT_INPUT);
-                String value = experimentInputs.get(inputKey);
-                exInputResource.setExperimentKey(inputKey);
-                exInputResource.setValue(value);
-                exInputResource.setExperimentMetadataResource(exBasicData);
-                exInputResource.save();
-            }
-            exConfigData.save();
+//            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, experimentID);
+//            ExperimentConfigDataResource exConfigData = (ExperimentConfigDataResource) exBasicData.create(ResourceType.EXPERIMENT_CONFIG_DATA);
+//            BasicMetadata updatedBasicMetadata = configurationData.getBasicMetadata();
+//            if (updatedBasicMetadata != null) {
+//                if (updatedBasicMetadata.getExperimentName() != null && !updatedBasicMetadata.getExperimentName().equals("")) {
+//                    exBasicData.setExperimentName(updatedBasicMetadata.getExperimentName());
+//                }
+//                if (updatedBasicMetadata.getExperimentDescription() != null && !updatedBasicMetadata.getExperimentDescription().equals("")) {
+//                    exBasicData.setDescription(updatedBasicMetadata.getExperimentDescription());
+//                }
+//                if (updatedBasicMetadata.getUserName() != null && !updatedBasicMetadata.getUserName().equals("")) {
+//                    exBasicData.setExecutionUser(updatedBasicMetadata.getUserName());
+//                }
+//                exBasicData.setShareExp(updatedBasicMetadata.isSetShareExperimentPublicly());
+//                exBasicData.save();
+//            }
+//            exConfigData.setExMetadata(exBasicData);
+//            exConfigData.setApplicationID(configurationData.getApplicationId());
+//            exConfigData.setApplicationVersion(configurationData.getApplicationVersion());
+//            exConfigData.setWorkflowTemplateId(configurationData.getWorkflowTemplateId());
+//            exConfigData.setWorkflowTemplateVersion(configurationData.getWorklfowTemplateVersion());
+//
+//            ComputationalResourceScheduling resourceScheduling = configurationData.getComputationalResourceScheduling();
+//            if (resourceScheduling != null) {
+//                exConfigData.setCpuCount(resourceScheduling.getTotalCPUCount());
+//                exConfigData.setAiravataAutoSchedule(resourceScheduling.isAiravataAutoSchedule());
+//                exConfigData.setOverrideManualSchedule(resourceScheduling.isOverrideManualScheduledParams());
+//                exConfigData.setResourceHostID(resourceScheduling.getResourceHostId());
+//                exConfigData.setNodeCount(resourceScheduling.getNodeCount());
+//                exConfigData.setNumberOfThreads(resourceScheduling.getNumberOfThreads());
+//                exConfigData.setQueueName(resourceScheduling.getQueueName());
+//                exConfigData.setWallTimeLimit(resourceScheduling.getWallTimeLimit());
+//                exConfigData.setJobStartTime(getTime(resourceScheduling.getJobStartTime()));
+//                exConfigData.setPhysicalMemory(resourceScheduling.getTotalPhysicalMemory());
+//                exConfigData.setProjectAccount(resourceScheduling.getComputationalProjectAccount());
+//            }
+//
+//            AdvancedInputDataHandling inputDataHandling = configurationData.getAdvanceInputDataHandling();
+//            if (inputDataHandling != null) {
+//                exConfigData.setStageInputsToWDir(inputDataHandling.isStageInputFilesToWorkingDir());
+//                exConfigData.setWorkingDirParent(inputDataHandling.getWorkingDirectoryParent());
+//                exConfigData.setWorkingDir(inputDataHandling.getUniqueWorkingDirectory());
+//                exConfigData.setCleanAfterJob(inputDataHandling.isCleanUpWorkingDirAfterJob());
+//            }
+//
+//            AdvancedOutputDataHandling outputDataHandling = configurationData.getAdvanceOutputDataHandling();
+//            if (outputDataHandling != null) {
+//                exConfigData.setOutputDataDir(outputDataHandling.getOutputdataDir());
+//                exConfigData.setDataRegURL(outputDataHandling.getDataRegistryURL());
+//                exConfigData.setPersistOutputData(outputDataHandling.isPersistOutputData());
+//            }
+//
+//            QualityOfServiceParams qosParams = configurationData.getQosParams();
+//            if (qosParams != null) {
+//                exConfigData.setStartExecutionAt(qosParams.getStartExecutionAt());
+//                exConfigData.setExecuteBefore(qosParams.getExecuteBefore());
+//                exConfigData.setNumberOfRetries(qosParams.getNumberofRetries());
+//            }
+//
+//            Map<String, String> experimentInputs = configurationData.getExperimentInputs();
+//            for (String inputKey : experimentInputs.keySet()) {
+//                ExperimentInputResource exInputResource = (ExperimentInputResource) exBasicData.create(ResourceType.EXPERIMENT_INPUT);
+//                String value = experimentInputs.get(inputKey);
+//                exInputResource.setExperimentKey(inputKey);
+//                exInputResource.setValue(value);
+//                exInputResource.setExperimentMetadataResource(exBasicData);
+//                exInputResource.save();
+//            }
+//            exConfigData.save();
         } catch (ApplicationSettingsException e) {
             logger.error("Unable to read airavata-server properties", e.getMessage());
         }
@@ -162,22 +165,22 @@ public class ExperimentRegistry {
     public void updateExpBasicMetadataField(String expID, String fieldName, Object value) {
         try {
             GatewayResource gateway = gatewayRegistry.getDefaultGateway();
-            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expID);
-            if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.EXPERIMENT_NAME)) {
-                exBasicData.setExperimentName((String) value);
-                exBasicData.save();
-            } else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.USER_NAME)) {
-                exBasicData.setExecutionUser((String) value);
-                exBasicData.save();
-            } else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.EXPERIMENT_DESC)) {
-                exBasicData.setDescription((String) value);
-                exBasicData.save();
-            } else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.SHARE_EXP_PUBLIC)) {
-                exBasicData.setShareExp((Boolean) value);
-                exBasicData.save();
-            }else {
+//            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expID);
+//            if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.EXPERIMENT_NAME)) {
+//                exBasicData.setExperimentName((String) value);
+//                exBasicData.save();
+//            } else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.USER_NAME)) {
+//                exBasicData.setExecutionUser((String) value);
+//                exBasicData.save();
+//            } else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.EXPERIMENT_DESC)) {
+//                exBasicData.setDescription((String) value);
+//                exBasicData.save();
+//            } else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.SHARE_EXP_PUBLIC)) {
+//                exBasicData.setShareExp((Boolean) value);
+//                exBasicData.save();
+//            }else {
                 logger.error("Unsupported field type for Experiment basic metadata");
-            }
+//            }
 
         } catch (ApplicationSettingsException e) {
             logger.error("Unable to read airavata-server properties", e.getMessage());
@@ -187,70 +190,70 @@ public class ExperimentRegistry {
     public void updateExpConfigDataField(String expID, String fieldName, Object value) {
         try {
             GatewayResource gateway = gatewayRegistry.getDefaultGateway();
-            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expID);
-            ExperimentConfigDataResource exConfigData = (ExperimentConfigDataResource)exBasicData.get(ResourceType.EXPERIMENT_CONFIG_DATA, expID);
-            if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.APPLICATION_ID)) {
-                exConfigData.setApplicationID((String) value);
-                exConfigData.save();
-            } else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.APPLICATION_VERSION)) {
-                exConfigData.setApplicationVersion((String) value);
-                exConfigData.save();
-            } else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.WORKFLOW_TEMPLATE_ID)) {
-                exConfigData.setWorkflowTemplateId((String) value);
-                exConfigData.save();
-            } else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.WORKFLOW_TEMPLATE_VERSION)) {
-                exConfigData.setWorkflowTemplateVersion((String) value);
-                exConfigData.save();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.COMPUTATIONAL_RESOURCE_SCHEDULING)){
-                ComputationalResourceScheduling resourceScheduling = (ComputationalResourceScheduling)value;
-                exConfigData.setCpuCount(resourceScheduling.getTotalCPUCount());
-                exConfigData.setAiravataAutoSchedule(resourceScheduling.isAiravataAutoSchedule());
-                exConfigData.setOverrideManualSchedule(resourceScheduling.isOverrideManualScheduledParams());
-                exConfigData.setResourceHostID(resourceScheduling.getResourceHostId());
-                exConfigData.setNodeCount(resourceScheduling.getNodeCount());
-                exConfigData.setNumberOfThreads(resourceScheduling.getNumberOfThreads());
-                exConfigData.setQueueName(resourceScheduling.getQueueName());
-                exConfigData.setWallTimeLimit(resourceScheduling.getWallTimeLimit());
-                exConfigData.setJobStartTime(getTime(resourceScheduling.getJobStartTime()));
-                exConfigData.setPhysicalMemory(resourceScheduling.getTotalPhysicalMemory());
-                exConfigData.setProjectAccount(resourceScheduling.getComputationalProjectAccount());
-                exConfigData.save();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.ADVANCED_INPUT_HANDLING)){
-                AdvancedInputDataHandling adInputHandling = (AdvancedInputDataHandling)value;
-                exConfigData.setStageInputsToWDir(adInputHandling.isStageInputFilesToWorkingDir());
-                exConfigData.setWorkingDirParent(adInputHandling.getWorkingDirectoryParent());
-                exConfigData.setWorkingDir(adInputHandling.getUniqueWorkingDirectory());
-                exConfigData.setCleanAfterJob(adInputHandling.isCleanUpWorkingDirAfterJob());
-                exConfigData.save();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.ADVANCED_OUTPUT_HANDLING)){
-                AdvancedOutputDataHandling adOutputHandling = (AdvancedOutputDataHandling)value;
-                exConfigData.setOutputDataDir(adOutputHandling.getOutputdataDir());
-                exConfigData.setDataRegURL(adOutputHandling.getDataRegistryURL());
-                exConfigData.setPersistOutputData(adOutputHandling.isPersistOutputData());
-                exConfigData.save();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.QOS_PARAMS)){
-                QualityOfServiceParams qosParams = (QualityOfServiceParams)value;
-                exConfigData.setStartExecutionAt(qosParams.getStartExecutionAt());
-                exConfigData.setExecuteBefore(qosParams.getExecuteBefore());
-                exConfigData.setNumberOfRetries(qosParams.getNumberofRetries());
-                exConfigData.save();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.EXPERIMENT_INPUTS)){
-                if (value instanceof Map){
-                    Map<String, String> experimentInputs = (HashMap<String, String>)value;
-                    List<Resource> exInputs = exBasicData.get(ResourceType.EXPERIMENT_INPUT);
-                    int i = 0;
-                    for (String exInputKey : experimentInputs.keySet()){
-                        ExperimentInputResource exInput = (ExperimentInputResource)exInputs.get(i);
-                        if (exInput.getExperimentKey().equals(exInputKey)){
-                            exInput.setValue(experimentInputs.get(exInputKey));
-                            exInput.save();
-                        }
-                        i++;
-                    }
-                }
-            }else {
-                logger.error("Unsupported field type for Experiment config data");
-            }
+//            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expID);
+//            ExperimentConfigDataResource exConfigData = (ExperimentConfigDataResource)exBasicData.get(ResourceType.EXPERIMENT_CONFIG_DATA, expID);
+//            if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.APPLICATION_ID)) {
+//                exConfigData.setApplicationID((String) value);
+//                exConfigData.save();
+//            } else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.APPLICATION_VERSION)) {
+//                exConfigData.setApplicationVersion((String) value);
+//                exConfigData.save();
+//            } else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.WORKFLOW_TEMPLATE_ID)) {
+//                exConfigData.setWorkflowTemplateId((String) value);
+//                exConfigData.save();
+//            } else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.WORKFLOW_TEMPLATE_VERSION)) {
+//                exConfigData.setWorkflowTemplateVersion((String) value);
+//                exConfigData.save();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.COMPUTATIONAL_RESOURCE_SCHEDULING)){
+//                ComputationalResourceScheduling resourceScheduling = (ComputationalResourceScheduling)value;
+//                exConfigData.setCpuCount(resourceScheduling.getTotalCPUCount());
+//                exConfigData.setAiravataAutoSchedule(resourceScheduling.isAiravataAutoSchedule());
+//                exConfigData.setOverrideManualSchedule(resourceScheduling.isOverrideManualScheduledParams());
+//                exConfigData.setResourceHostID(resourceScheduling.getResourceHostId());
+//                exConfigData.setNodeCount(resourceScheduling.getNodeCount());
+//                exConfigData.setNumberOfThreads(resourceScheduling.getNumberOfThreads());
+//                exConfigData.setQueueName(resourceScheduling.getQueueName());
+//                exConfigData.setWallTimeLimit(resourceScheduling.getWallTimeLimit());
+//                exConfigData.setJobStartTime(getTime(resourceScheduling.getJobStartTime()));
+//                exConfigData.setPhysicalMemory(resourceScheduling.getTotalPhysicalMemory());
+//                exConfigData.setProjectAccount(resourceScheduling.getComputationalProjectAccount());
+//                exConfigData.save();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.ADVANCED_INPUT_HANDLING)){
+//                AdvancedInputDataHandling adInputHandling = (AdvancedInputDataHandling)value;
+//                exConfigData.setStageInputsToWDir(adInputHandling.isStageInputFilesToWorkingDir());
+//                exConfigData.setWorkingDirParent(adInputHandling.getWorkingDirectoryParent());
+//                exConfigData.setWorkingDir(adInputHandling.getUniqueWorkingDirectory());
+//                exConfigData.setCleanAfterJob(adInputHandling.isCleanUpWorkingDirAfterJob());
+//                exConfigData.save();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.ADVANCED_OUTPUT_HANDLING)){
+//                AdvancedOutputDataHandling adOutputHandling = (AdvancedOutputDataHandling)value;
+//                exConfigData.setOutputDataDir(adOutputHandling.getOutputdataDir());
+//                exConfigData.setDataRegURL(adOutputHandling.getDataRegistryURL());
+//                exConfigData.setPersistOutputData(adOutputHandling.isPersistOutputData());
+//                exConfigData.save();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.QOS_PARAMS)){
+//                QualityOfServiceParams qosParams = (QualityOfServiceParams)value;
+//                exConfigData.setStartExecutionAt(qosParams.getStartExecutionAt());
+//                exConfigData.setExecuteBefore(qosParams.getExecuteBefore());
+//                exConfigData.setNumberOfRetries(qosParams.getNumberofRetries());
+//                exConfigData.save();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.EXPERIMENT_INPUTS)){
+//                if (value instanceof Map){
+//                    Map<String, String> experimentInputs = (HashMap<String, String>)value;
+//                    List<Resource> exInputs = exBasicData.get(ResourceType.EXPERIMENT_INPUT);
+//                    int i = 0;
+//                    for (String exInputKey : experimentInputs.keySet()){
+//                        ExperimentInputResource exInput = (ExperimentInputResource)exInputs.get(i);
+//                        if (exInput.getExperimentKey().equals(exInputKey)){
+//                            exInput.setValue(experimentInputs.get(exInputKey));
+//                            exInput.save();
+//                        }
+//                        i++;
+//                    }
+//                }
+//            }else {
+//                logger.error("Unsupported field type for Experiment config data");
+//            }
 
         } catch (ApplicationSettingsException e) {
             logger.error("Unable to read airavata-server properties", e.getMessage());
@@ -260,87 +263,87 @@ public class ExperimentRegistry {
     public void updateBasicData(BasicMetadata basicMetadata, String expId) throws ApplicationSettingsException {
         GatewayResource gateway = gatewayRegistry.getDefaultGateway();
         WorkerResource worker = userReg.getSysteUser();
-        ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expId);
-        exBasicData.setExperimentName(basicMetadata.getExperimentName());
-        exBasicData.setDescription(basicMetadata.getExperimentDescription());
-        exBasicData.setExecutionUser(basicMetadata.getUserName());
-        exBasicData.setSubmittedDate(getCurrentTimestamp());
-        exBasicData.setShareExp(basicMetadata.isSetShareExperimentPublicly());
-        exBasicData.setProject(worker.getProject(basicMetadata.getProjectID()));
-        exBasicData.save();
+//        ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expId);
+//        exBasicData.setExperimentName(basicMetadata.getExperimentName());
+//        exBasicData.setDescription(basicMetadata.getExperimentDescription());
+//        exBasicData.setExecutionUser(basicMetadata.getUserName());
+//        exBasicData.setSubmittedDate(getCurrentTimestamp());
+//        exBasicData.setShareExp(basicMetadata.isSetShareExperimentPublicly());
+//        exBasicData.setProject(worker.getProject(basicMetadata.getProjectID()));
+//        exBasicData.save();
     }
 
     public void updateExpConfigData(ConfigurationData configData, String expId) throws ApplicationSettingsException {
         GatewayResource gateway = gatewayRegistry.getDefaultGateway();
-        ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expId);
-        ExperimentConfigDataResource exConfigResource = (ExperimentConfigDataResource) exBasicData.get(ResourceType.EXPERIMENT_CONFIG_DATA, expId);
-        BasicMetadata updatedBasicMetadata = configData.getBasicMetadata();
-        if (updatedBasicMetadata != null) {
-            if (updatedBasicMetadata.getExperimentName() != null && !updatedBasicMetadata.getExperimentName().equals("")) {
-                exBasicData.setExperimentName(updatedBasicMetadata.getExperimentName());
-            }
-            if (updatedBasicMetadata.getExperimentDescription() != null && !updatedBasicMetadata.getExperimentDescription().equals("")) {
-                exBasicData.setDescription(updatedBasicMetadata.getExperimentDescription());
-            }
-            if (updatedBasicMetadata.getUserName() != null && !updatedBasicMetadata.getUserName().equals("")) {
-                exBasicData.setExecutionUser(updatedBasicMetadata.getUserName());
-            }
-            exBasicData.setShareExp(updatedBasicMetadata.isSetShareExperimentPublicly());
-            exBasicData.save();
-        }
-        exConfigResource.setExMetadata(exBasicData);
-        exConfigResource.setApplicationID(configData.getApplicationId());
-        exConfigResource.setApplicationVersion(configData.getApplicationVersion());
-        exConfigResource.setWorkflowTemplateId(configData.getWorkflowTemplateId());
-        exConfigResource.setWorkflowTemplateVersion(configData.getWorklfowTemplateVersion());
-
-        ComputationalResourceScheduling resourceScheduling = configData.getComputationalResourceScheduling();
-        if (resourceScheduling != null) {
-            exConfigResource.setCpuCount(resourceScheduling.getTotalCPUCount());
-            exConfigResource.setAiravataAutoSchedule(resourceScheduling.isAiravataAutoSchedule());
-            exConfigResource.setOverrideManualSchedule(resourceScheduling.isOverrideManualScheduledParams());
-            exConfigResource.setResourceHostID(resourceScheduling.getResourceHostId());
-            exConfigResource.setNodeCount(resourceScheduling.getNodeCount());
-            exConfigResource.setNumberOfThreads(resourceScheduling.getNumberOfThreads());
-            exConfigResource.setQueueName(resourceScheduling.getQueueName());
-            exConfigResource.setWallTimeLimit(resourceScheduling.getWallTimeLimit());
-            exConfigResource.setJobStartTime(getTime(resourceScheduling.getJobStartTime()));
-            exConfigResource.setPhysicalMemory(resourceScheduling.getTotalPhysicalMemory());
-            exConfigResource.setProjectAccount(resourceScheduling.getComputationalProjectAccount());
-        }
-
-        AdvancedInputDataHandling inputDataHandling = configData.getAdvanceInputDataHandling();
-        if (inputDataHandling != null) {
-            exConfigResource.setStageInputsToWDir(inputDataHandling.isStageInputFilesToWorkingDir());
-            exConfigResource.setWorkingDirParent(inputDataHandling.getWorkingDirectoryParent());
-            exConfigResource.setWorkingDir(inputDataHandling.getUniqueWorkingDirectory());
-            exConfigResource.setCleanAfterJob(inputDataHandling.isCleanUpWorkingDirAfterJob());
-        }
-
-        AdvancedOutputDataHandling outputDataHandling = configData.getAdvanceOutputDataHandling();
-        if (outputDataHandling != null) {
-            exConfigResource.setOutputDataDir(outputDataHandling.getOutputdataDir());
-            exConfigResource.setDataRegURL(outputDataHandling.getDataRegistryURL());
-            exConfigResource.setPersistOutputData(outputDataHandling.isPersistOutputData());
-        }
-
-        QualityOfServiceParams qosParams = configData.getQosParams();
-        if (qosParams != null) {
-            exConfigResource.setStartExecutionAt(qosParams.getStartExecutionAt());
-            exConfigResource.setExecuteBefore(qosParams.getExecuteBefore());
-            exConfigResource.setNumberOfRetries(qosParams.getNumberofRetries());
-        }
-
-        Map<String, String> experimentInputs = configData.getExperimentInputs();
-        for (String inputKey : experimentInputs.keySet()) {
-            ExperimentInputResource exInputResource = (ExperimentInputResource) exBasicData.create(ResourceType.EXPERIMENT_INPUT);
-            String value = experimentInputs.get(inputKey);
-            exInputResource.setExperimentKey(inputKey);
-            exInputResource.setValue(value);
-            exInputResource.setExperimentMetadataResource(exBasicData);
-            exInputResource.save();
-        }
-        exConfigResource.save();
+//        ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expId);
+//        ExperimentConfigDataResource exConfigResource = (ExperimentConfigDataResource) exBasicData.get(ResourceType.EXPERIMENT_CONFIG_DATA, expId);
+//        BasicMetadata updatedBasicMetadata = configData.getBasicMetadata();
+//        if (updatedBasicMetadata != null) {
+//            if (updatedBasicMetadata.getExperimentName() != null && !updatedBasicMetadata.getExperimentName().equals("")) {
+//                exBasicData.setExperimentName(updatedBasicMetadata.getExperimentName());
+//            }
+//            if (updatedBasicMetadata.getExperimentDescription() != null && !updatedBasicMetadata.getExperimentDescription().equals("")) {
+//                exBasicData.setDescription(updatedBasicMetadata.getExperimentDescription());
+//            }
+//            if (updatedBasicMetadata.getUserName() != null && !updatedBasicMetadata.getUserName().equals("")) {
+//                exBasicData.setExecutionUser(updatedBasicMetadata.getUserName());
+//            }
+//            exBasicData.setShareExp(updatedBasicMetadata.isSetShareExperimentPublicly());
+//            exBasicData.save();
+//        }
+//        exConfigResource.setExMetadata(exBasicData);
+//        exConfigResource.setApplicationID(configData.getApplicationId());
+//        exConfigResource.setApplicationVersion(configData.getApplicationVersion());
+//        exConfigResource.setWorkflowTemplateId(configData.getWorkflowTemplateId());
+//        exConfigResource.setWorkflowTemplateVersion(configData.getWorklfowTemplateVersion());
+//
+//        ComputationalResourceScheduling resourceScheduling = configData.getComputationalResourceScheduling();
+//        if (resourceScheduling != null) {
+//            exConfigResource.setCpuCount(resourceScheduling.getTotalCPUCount());
+//            exConfigResource.setAiravataAutoSchedule(resourceScheduling.isAiravataAutoSchedule());
+//            exConfigResource.setOverrideManualSchedule(resourceScheduling.isOverrideManualScheduledParams());
+//            exConfigResource.setResourceHostID(resourceScheduling.getResourceHostId());
+//            exConfigResource.setNodeCount(resourceScheduling.getNodeCount());
+//            exConfigResource.setNumberOfThreads(resourceScheduling.getNumberOfThreads());
+//            exConfigResource.setQueueName(resourceScheduling.getQueueName());
+//            exConfigResource.setWallTimeLimit(resourceScheduling.getWallTimeLimit());
+//            exConfigResource.setJobStartTime(getTime(resourceScheduling.getJobStartTime()));
+//            exConfigResource.setPhysicalMemory(resourceScheduling.getTotalPhysicalMemory());
+//            exConfigResource.setProjectAccount(resourceScheduling.getComputationalProjectAccount());
+//        }
+//
+//        AdvancedInputDataHandling inputDataHandling = configData.getAdvanceInputDataHandling();
+//        if (inputDataHandling != null) {
+//            exConfigResource.setStageInputsToWDir(inputDataHandling.isStageInputFilesToWorkingDir());
+//            exConfigResource.setWorkingDirParent(inputDataHandling.getWorkingDirectoryParent());
+//            exConfigResource.setWorkingDir(inputDataHandling.getUniqueWorkingDirectory());
+//            exConfigResource.setCleanAfterJob(inputDataHandling.isCleanUpWorkingDirAfterJob());
+//        }
+//
+//        AdvancedOutputDataHandling outputDataHandling = configData.getAdvanceOutputDataHandling();
+//        if (outputDataHandling != null) {
+//            exConfigResource.setOutputDataDir(outputDataHandling.getOutputdataDir());
+//            exConfigResource.setDataRegURL(outputDataHandling.getDataRegistryURL());
+//            exConfigResource.setPersistOutputData(outputDataHandling.isPersistOutputData());
+//        }
+//
+//        QualityOfServiceParams qosParams = configData.getQosParams();
+//        if (qosParams != null) {
+//            exConfigResource.setStartExecutionAt(qosParams.getStartExecutionAt());
+//            exConfigResource.setExecuteBefore(qosParams.getExecuteBefore());
+//            exConfigResource.setNumberOfRetries(qosParams.getNumberofRetries());
+//        }
+//
+//        Map<String, String> experimentInputs = configData.getExperimentInputs();
+//        for (String inputKey : experimentInputs.keySet()) {
+//            ExperimentInputResource exInputResource = (ExperimentInputResource) exBasicData.create(ResourceType.EXPERIMENT_INPUT);
+//            String value = experimentInputs.get(inputKey);
+//            exInputResource.setExperimentKey(inputKey);
+//            exInputResource.setValue(value);
+//            exInputResource.setExperimentMetadataResource(exBasicData);
+//            exInputResource.save();
+//        }
+//        exConfigResource.save();
     }
 
     public List<BasicMetadata> getExperimentMetaDataList (String fieldName, Object value){
@@ -351,9 +354,9 @@ public class ExperimentRegistry {
                 WorkerResource worker = userRegistry.getExistingUser(ServerSettings.getSystemUserGateway(), (String)value);
                 List<Resource> resources = worker.get(ResourceType.EXPERIMENT_METADATA);
                 for (Resource resource : resources){
-                    ExperimentMetadataResource ex =  (ExperimentMetadataResource)resource;
-                    BasicMetadata basicMetadata = ThriftDataModelConversion.getBasicMetadata(ex);
-                    metadataList.add(basicMetadata);
+//                    ExperimentMetadataResource ex =  (ExperimentMetadataResource)resource;
+//                    BasicMetadata basicMetadata = ThriftDataModelConversion.getBasicMetadata(ex);
+//                    metadataList.add(basicMetadata);
                 }
                 return metadataList;
             }else {
@@ -371,13 +374,13 @@ public class ExperimentRegistry {
             gatewayRegistry = new GatewayRegistry();
             GatewayResource gateway = gatewayRegistry.getDefaultGateway();
             if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.EXPERIMENT_ID)){
-                ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, value);
-                List<Resource> resources = exBasicData.get(ResourceType.EXPERIMENT_CONFIG_DATA);
-                for (Resource resource : resources){
-                    ExperimentConfigDataResource configDataResource = (ExperimentConfigDataResource)resource;
-                    ConfigurationData conData = ThriftDataModelConversion.getConfigurationData(configDataResource);
-                    configDataList.add(conData);
-                }
+//                ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, value);
+//                List<Resource> resources = exBasicData.get(ResourceType.EXPERIMENT_CONFIG_DATA);
+//                for (Resource resource : resources){
+//                    ExperimentConfigDataResource configDataResource = (ExperimentConfigDataResource)resource;
+//                    ConfigurationData conData = ThriftDataModelConversion.getConfigurationData(configDataResource);
+//                    configDataList.add(conData);
+//                }
                 return configDataList;
             }else {
                 logger.error("Unsupported field type for Experiment meta data");
@@ -392,24 +395,24 @@ public class ExperimentRegistry {
         try {
             gatewayRegistry = new GatewayRegistry();
             GatewayResource gateway = gatewayRegistry.getDefaultGateway();
-            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expId);
-            if (fieldName == null){
-                return ThriftDataModelConversion.getBasicMetadata(exBasicData);
-            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.USER_NAME)){
-                return exBasicData.getExecutionUser();
-            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.EXPERIMENT_NAME)){
-                return exBasicData.getExperimentName();
-            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.EXPERIMENT_DESC)){
-                return exBasicData.getDescription();
-            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.SHARE_EXP_PUBLIC)){
-                return exBasicData.isShareExp();
-            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.PROJECT_NAME)){
-                return exBasicData.getProject().getName();
-            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.SUBMITTED_DATE)){
-                return exBasicData.getSubmittedDate();
-            }else {
-                logger.error("Unsupported field name for experiment basic data..");
-            }
+//            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expId);
+//            if (fieldName == null){
+//                return ThriftDataModelConversion.getBasicMetadata(exBasicData);
+//            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.USER_NAME)){
+//                return exBasicData.getExecutionUser();
+//            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.EXPERIMENT_NAME)){
+//                return exBasicData.getExperimentName();
+//            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.EXPERIMENT_DESC)){
+//                return exBasicData.getDescription();
+//            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.SHARE_EXP_PUBLIC)){
+//                return exBasicData.isShareExp();
+//            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.PROJECT_NAME)){
+//                return exBasicData.getProject().getName();
+//            }else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.SUBMITTED_DATE)){
+//                return exBasicData.getSubmittedDate();
+//            }else {
+//                logger.error("Unsupported field name for experiment basic data..");
+//            }
         } catch (ApplicationSettingsException e) {
             logger.error("Unable to read airavata-server properties", e.getMessage());
         }
@@ -420,31 +423,31 @@ public class ExperimentRegistry {
         try {
             gatewayRegistry = new GatewayRegistry();
             GatewayResource gateway = gatewayRegistry.getDefaultGateway();
-            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expId);
-            ExperimentConfigDataResource exCongfig = (ExperimentConfigDataResource)exBasicData.get(ResourceType.EXPERIMENT_CONFIG_DATA, expId);
-            if (fieldName == null){
-                return ThriftDataModelConversion.getConfigurationData(exCongfig);
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.APPLICATION_ID)){
-                return exCongfig.getApplicationID();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.APPLICATION_VERSION)){
-                return exCongfig.getApplicationVersion();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.WORKFLOW_TEMPLATE_ID)){
-                return exCongfig.getWorkflowTemplateId();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.WORKFLOW_TEMPLATE_VERSION)){
-                return exCongfig.getWorkflowTemplateVersion();
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.EXPERIMENT_INPUTS)){
-                return ThriftDataModelConversion.getExperimentInputs(exBasicData);
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.COMPUTATIONAL_RESOURCE_SCHEDULING)){
-                return ThriftDataModelConversion.getComputationalResourceScheduling(exCongfig);
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.ADVANCED_INPUT_HANDLING)){
-                return ThriftDataModelConversion.getAdvanceInputDataHandling(exCongfig);
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.ADVANCED_OUTPUT_HANDLING)){
-                return ThriftDataModelConversion.getAdvanceOutputDataHandling(exCongfig);
-            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.QOS_PARAMS)){
-                return ThriftDataModelConversion.getQOSParams(exCongfig);
-            }else {
-                logger.error("Unsupported field name for experiment configuration data..");
-            }
+//            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource) gateway.get(ResourceType.EXPERIMENT_METADATA, expId);
+//            ExperimentConfigDataResource exCongfig = (ExperimentConfigDataResource)exBasicData.get(ResourceType.EXPERIMENT_CONFIG_DATA, expId);
+//            if (fieldName == null){
+//                return ThriftDataModelConversion.getConfigurationData(exCongfig);
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.APPLICATION_ID)){
+//                return exCongfig.getApplicationID();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.APPLICATION_VERSION)){
+//                return exCongfig.getApplicationVersion();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.WORKFLOW_TEMPLATE_ID)){
+//                return exCongfig.getWorkflowTemplateId();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.WORKFLOW_TEMPLATE_VERSION)){
+//                return exCongfig.getWorkflowTemplateVersion();
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.EXPERIMENT_INPUTS)){
+//                return ThriftDataModelConversion.getExperimentInputs(exBasicData);
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.COMPUTATIONAL_RESOURCE_SCHEDULING)){
+//                return ThriftDataModelConversion.getComputationalResourceScheduling(exCongfig);
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.ADVANCED_INPUT_HANDLING)){
+//                return ThriftDataModelConversion.getAdvanceInputDataHandling(exCongfig);
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.ADVANCED_OUTPUT_HANDLING)){
+//                return ThriftDataModelConversion.getAdvanceOutputDataHandling(exCongfig);
+//            }else if (fieldName.equals(Constants.FieldConstants.ConfigurationDataConstants.QOS_PARAMS)){
+//                return ThriftDataModelConversion.getQOSParams(exCongfig);
+//            }else {
+//                logger.error("Unsupported field name for experiment configuration data..");
+//            }
         } catch (ApplicationSettingsException e) {
             logger.error("Unable to read airavata-server properties..", e.getMessage());
         }
@@ -460,18 +463,18 @@ public class ExperimentRegistry {
                     logger.error("You should use an existing gateway in order to retrieve experiments..");
                     return null;
                 } else {
-                    List<Resource> resources = gateway.get(ResourceType.EXPERIMENT_METADATA);
-                    for (Resource resource : resources) {
-                        String expID = ((ExperimentMetadataResource) resource).getExpID();
-                        expIDs.add(expID);
-                    }
+//                    List<Resource> resources = gateway.get(ResourceType.EXPERIMENT_METADATA);
+//                    for (Resource resource : resources) {
+//                        String expID = ((ExperimentMetadataResource) resource).getExpID();
+//                        expIDs.add(expID);
+//                    }
                 }
             } else if (fieldName.equals(Constants.FieldConstants.BasicMetadataConstants.USER_NAME)) {
                 WorkerResource workerResource = userReg.getExistingUser(ServerSettings.getSystemUserGateway(), (String)value);
                 List<Resource> resources = workerResource.get(ResourceType.EXPERIMENT_METADATA);
                 for (Resource resource : resources) {
-                    String expID = ((ExperimentMetadataResource) resource).getExpID();
-                    expIDs.add(expID);
+//                    String expID = ((ExperimentMetadataResource) resource).getExpID();
+//                    expIDs.add(expID);
                 }
             }
         } catch (ApplicationSettingsException e) {
@@ -493,8 +496,8 @@ public class ExperimentRegistry {
     public void removeExperimentConfigData(String experimentId) {
         try {
             GatewayResource defaultGateway = gatewayRegistry.getDefaultGateway();
-            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource)defaultGateway.get(ResourceType.EXPERIMENT_METADATA, experimentId);
-            exBasicData.remove(ResourceType.EXPERIMENT_CONFIG_DATA, experimentId);
+//            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource)defaultGateway.get(ResourceType.EXPERIMENT_METADATA, experimentId);
+//            exBasicData.remove(ResourceType.EXPERIMENT_CONFIG_DATA, experimentId);
         } catch (ApplicationSettingsException e) {
             logger.error("Unable to read airavata-server properties..", e.getMessage());
         }
@@ -514,8 +517,8 @@ public class ExperimentRegistry {
     public boolean isExperimentConfigDataExist(String expID) {
         try {
             GatewayResource defaultGateway = gatewayRegistry.getDefaultGateway();
-            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource)defaultGateway.get(ResourceType.EXPERIMENT_METADATA, expID);
-            exBasicData.isExists(ResourceType.EXPERIMENT_CONFIG_DATA, expID);
+//            ExperimentMetadataResource exBasicData = (ExperimentMetadataResource)defaultGateway.get(ResourceType.EXPERIMENT_METADATA, expID);
+//            exBasicData.isExists(ResourceType.EXPERIMENT_CONFIG_DATA, expID);
             return true;
         } catch (ApplicationSettingsException e) {
             logger.error("Unable to read airavata-server properties..", e.getMessage());

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/AdvancedInputDataHandling.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/AdvancedInputDataHandling.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/AdvancedInputDataHandling.java
new file mode 100644
index 0000000..16aa6b2
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/AdvancedInputDataHandling.java
@@ -0,0 +1,125 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "ADVANCE_INPUT_DATA_HANDLING")
+public class AdvancedInputDataHandling {
+    @Id
+    @GeneratedValue
+    @Column(name = "INPUT_DATA_HANDLING_ID")
+    private int dataHandlingId;
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "WORKING_DIR_PARENT")
+    private String parentWorkingDir;
+    @Column(name = "UNIQUE_WORKING_DIR")
+    private String workingDir;
+    @Column(name = "STAGE_INPUT_FILES_TO_WORKING_DIR")
+    private boolean stageInputsToWorkingDir;
+    @Column(name = "CLEAN_AFTER_JOB")
+    private boolean cleanAfterJob;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    public int getDataHandlingId() {
+        return dataHandlingId;
+    }
+
+    public void setDataHandlingId(int dataHandlingId) {
+        this.dataHandlingId = dataHandlingId;
+    }
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getParentWorkingDir() {
+        return parentWorkingDir;
+    }
+
+    public void setParentWorkingDir(String parentWorkingDir) {
+        this.parentWorkingDir = parentWorkingDir;
+    }
+
+    public String getWorkingDir() {
+        return workingDir;
+    }
+
+    public void setWorkingDir(String workingDir) {
+        this.workingDir = workingDir;
+    }
+
+    public boolean isStageInputsToWorkingDir() {
+        return stageInputsToWorkingDir;
+    }
+
+    public void setStageInputsToWorkingDir(boolean stageInputsToWorkingDir) {
+        this.stageInputsToWorkingDir = stageInputsToWorkingDir;
+    }
+
+    public boolean isCleanAfterJob() {
+        return cleanAfterJob;
+    }
+
+    public void setCleanAfterJob(boolean cleanAfterJob) {
+        this.cleanAfterJob = cleanAfterJob;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+}
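The new AdvancedInputDataHandling entity maps the ADVANCE_INPUT_DATA_HANDLING table, carrying EXPERIMENT_ID and TASK_ID both as plain columns and as @ManyToOne joins on the same columns; most JPA providers require one of the two mappings on a shared column to be marked read-only (insertable = false, updatable = false), so that detail may need attention when the entity is wired into the persistence unit. A hedged usage sketch follows, using only the setters declared above; the EntityManager parameter, the method name, and the id/path values are hypothetical and not part of the patch:

    import javax.persistence.EntityManager;

    // Hedged sketch, not part of the patch: persists one row of the new
    // ADVANCE_INPUT_DATA_HANDLING table. 'em' is a hypothetical EntityManager
    // created from this module's persistence unit.
    void saveInputHandling(EntityManager em) {
        AdvancedInputDataHandling inputHandling = new AdvancedInputDataHandling();
        inputHandling.setExpId("experiment-123");          // hypothetical experiment id
        inputHandling.setTaskId("task-456");               // hypothetical task id
        inputHandling.setParentWorkingDir("/scratch/airavata");
        inputHandling.setWorkingDir("/scratch/airavata/experiment-123");
        inputHandling.setStageInputsToWorkingDir(true);
        inputHandling.setCleanAfterJob(false);
        em.getTransaction().begin();
        em.persist(inputHandling);
        em.getTransaction().commit();
    }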

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/AdvancedOutputDataHandling.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/AdvancedOutputDataHandling.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/AdvancedOutputDataHandling.java
new file mode 100644
index 0000000..93a1217
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/AdvancedOutputDataHandling.java
@@ -0,0 +1,115 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "ADVANCE_OUTPUT_DATA_HANDLING")
+public class AdvancedOutputDataHandling {
+    @Id
+    @GeneratedValue
+    @Column(name = "OUTPUT_DATA_HANDLING_ID")
+    private int outputDataHandlingId;
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "OUTPUT_DATA_DIR")
+    private String outputDataDir;
+    @Column(name = "DATA_REG_URL")
+    private String dataRegUrl;
+    @Column(name = "PERSIST_OUTPUT_DATA")
+    private boolean persistOutputData;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    public int getOutputDataHandlingId() {
+        return outputDataHandlingId;
+    }
+
+    public void setOutputDataHandlingId(int outputDataHandlingId) {
+        this.outputDataHandlingId = outputDataHandlingId;
+    }
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getOutputDataDir() {
+        return outputDataDir;
+    }
+
+    public void setOutputDataDir(String outputDataDir) {
+        this.outputDataDir = outputDataDir;
+    }
+
+    public String getDataRegUrl() {
+        return dataRegUrl;
+    }
+
+    public void setDataRegUrl(String dataRegUrl) {
+        this.dataRegUrl = dataRegUrl;
+    }
+
+    public boolean isPersistOutputData() {
+        return persistOutputData;
+    }
+
+    public void setPersistOutputData(boolean persistOutputData) {
+        this.persistOutputData = persistOutputData;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+}
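AdvancedOutputDataHandling mirrors the input-handling entity above for the ADVANCE_OUTPUT_DATA_HANDLING table (output directory, data registry URL, persist flag). A hedged sketch of reading such rows back by experiment id over the entity's expId field; the EntityManager parameter and method name are hypothetical and not part of the patch:

    import java.util.List;
    import javax.persistence.EntityManager;
    import javax.persistence.TypedQuery;

    // Hedged sketch, not part of the patch: looks up output-data-handling rows for
    // one experiment via JPQL over the 'expId' field declared above.
    List<AdvancedOutputDataHandling> findOutputHandling(EntityManager em, String expId) {
        TypedQuery<AdvancedOutputDataHandling> query = em.createQuery(
                "SELECT o FROM AdvancedOutputDataHandling o WHERE o.expId = :expId",
                AdvancedOutputDataHandling.class);
        query.setParameter("expId", expId);
        return query.getResultList();
    }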

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationInput.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationInput.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationInput.java
new file mode 100644
index 0000000..1b7105b
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationInput.java
@@ -0,0 +1,94 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "APPLICATION_INPUT")
+@IdClass(ApplicationInput_PK.class)
+public class ApplicationInput {
+    @Id
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Id
+    @Column(name = "INPUT_KEY")
+    private String inputKey;
+    @Column(name = "INPUT_KEY_TYPE")
+    private String inputKeyType;
+    @Column(name = "METADATA")
+    private String metadata;
+    @Column(name = "VALUE")
+    private String value;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getInputKey() {
+        return inputKey;
+    }
+
+    public void setInputKey(String inputKey) {
+        this.inputKey = inputKey;
+    }
+
+    public String getInputKeyType() {
+        return inputKeyType;
+    }
+
+    public void setInputKeyType(String inputKeyType) {
+        this.inputKeyType = inputKeyType;
+    }
+
+    public String getMetadata() {
+        return metadata;
+    }
+
+    public void setMetadata(String metadata) {
+        this.metadata = metadata;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationInput_PK.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationInput_PK.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationInput_PK.java
new file mode 100644
index 0000000..476b2c7
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationInput_PK.java
@@ -0,0 +1,65 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+public class ApplicationInput_PK {
+    private String taskId;
+    private String inputKey;
+
+    public ApplicationInput_PK(String inputKey, String taskId) {
+        this.inputKey = inputKey;
+        this.taskId = taskId;
+    }
+
+    public ApplicationInput_PK() {
+        // no-arg constructor required by JPA for an IdClass
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof ApplicationInput_PK)) return false;
+        ApplicationInput_PK other = (ApplicationInput_PK) o;
+        return taskId.equals(other.taskId) && inputKey.equals(other.inputKey);
+    }
+
+    @Override
+    public int hashCode() {
+        return 31 * taskId.hashCode() + inputKey.hashCode();
+    }
+
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getInputKey() {
+        return inputKey;
+    }
+
+    public void setInputKey(String inputKey) {
+        this.inputKey = inputKey;
+    }
+}
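
A minimal lookup sketch for the new composite-key mapping (illustrative only, not part of this diff): with @IdClass, EntityManager.find() takes an ApplicationInput_PK instance. The persistence-unit name "airavata_data" and the ids below are assumed placeholders.

    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Persistence;

    import org.apache.airavata.persistance.registry.jpa.model.ApplicationInput;
    import org.apache.airavata.persistance.registry.jpa.model.ApplicationInput_PK;

    public class ApplicationInputLookup {
        public static void main(String[] args) {
            // "airavata_data" is an assumed persistence-unit name, used here for illustration.
            EntityManagerFactory emf = Persistence.createEntityManagerFactory("airavata_data");
            EntityManager em = emf.createEntityManager();
            try {
                // The PK class constructor takes (inputKey, taskId); both ids are placeholders.
                ApplicationInput_PK key = new ApplicationInput_PK("echo_input", "TASK_123");
                ApplicationInput input = em.find(ApplicationInput.class, key);
                if (input != null) {
                    System.out.println(input.getInputKey() + " = " + input.getValue());
                }
            } finally {
                em.close();
                emf.close();
            }
        }
    }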

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationOutput.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationOutput.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationOutput.java
new file mode 100644
index 0000000..4afd1a1
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationOutput.java
@@ -0,0 +1,94 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "APPLICATION_OUTPUT")
+@IdClass(ApplicationOutput_PK.class)
+public class ApplicationOutput {
+    @Id
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Id
+    @Column(name = "OUTPUT_KEY")
+    private String outputKey;
+    @Column(name = "OUTPUT_KEY_TYPE")
+    private String outputKeyType;
+    @Column(name = "METADATA")
+    private String metadata;
+    @Column(name = "VALUE")
+    private String value;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getMetadata() {
+        return metadata;
+    }
+
+    public void setMetadata(String metadata) {
+        this.metadata = metadata;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+
+    public String getOutputKey() {
+        return outputKey;
+    }
+
+    public void setOutputKey(String outputKey) {
+        this.outputKey = outputKey;
+    }
+
+    public String getOutputKeyType() {
+        return outputKeyType;
+    }
+
+    public void setOutputKeyType(String outputKeyType) {
+        this.outputKeyType = outputKeyType;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationOutput_PK.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationOutput_PK.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationOutput_PK.java
new file mode 100644
index 0000000..d046134
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ApplicationOutput_PK.java
@@ -0,0 +1,64 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+public class ApplicationOutput_PK {
+    private String taskId;
+    private String outputKey;
+
+    public ApplicationOutput_PK(String outputKey, String taskId) {
+        this.outputKey = outputKey;
+        this.taskId = taskId;
+    }
+
+    public ApplicationOutput_PK() {
+        // no-arg constructor required by JPA for an IdClass
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof ApplicationOutput_PK)) return false;
+        ApplicationOutput_PK other = (ApplicationOutput_PK) o;
+        return taskId.equals(other.taskId) && outputKey.equals(other.outputKey);
+    }
+
+    @Override
+    public int hashCode() {
+        return 31 * taskId.hashCode() + outputKey.hashCode();
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getOutputKey() {
+        return outputKey;
+    }
+
+    public void setOutputKey(String outputKey) {
+        this.outputKey = outputKey;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Computational_Resource_Scheduling.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Computational_Resource_Scheduling.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Computational_Resource_Scheduling.java
new file mode 100644
index 0000000..0f199e9
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Computational_Resource_Scheduling.java
@@ -0,0 +1,176 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+import java.sql.Timestamp;
+
+@Entity
+@Table(name = "COMPUTATIONAL_RESOURCE_SCHEDULING")
+public class Computational_Resource_Scheduling {
+    @Id
+    @GeneratedValue
+    @Column(name = "RESOURCE_SCHEDULING_ID")
+    private int schedulingId;
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "RESOURCE_HOST_ID")
+    private String resourceHostId;
+    @Column(name = "CPU_COUNT")
+    private int cpuCount;
+    @Column(name = "NODE_COUNT")
+    private int nodeCount;
+    @Column(name = "NO_OF_THREADS")
+    private int numberOfThreads;
+    @Column(name = "QUEUE_NAME")
+    private String queueName;
+    @Column(name = "WALLTIME_LIMIT")
+    private int wallTimeLimit;
+    @Column(name = "JOB_START_TIME")
+    private Timestamp jobStartTime;
+    @Column(name = "TOTAL_PHYSICAL_MEMORY")
+    private int totalPhysicalmemory;
+    @Column(name = "COMPUTATIONAL_PROJECT_ACCOUNT")
+    private String projectName;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    public int getSchedulingId() {
+        return schedulingId;
+    }
+
+    public void setSchedulingId(int schedulingId) {
+        this.schedulingId = schedulingId;
+    }
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getResourceHostId() {
+        return resourceHostId;
+    }
+
+    public void setResourceHostId(String resourceHostId) {
+        this.resourceHostId = resourceHostId;
+    }
+
+    public int getCpuCount() {
+        return cpuCount;
+    }
+
+    public void setCpuCount(int cpuCount) {
+        this.cpuCount = cpuCount;
+    }
+
+    public int getNodeCount() {
+        return nodeCount;
+    }
+
+    public void setNodeCount(int nodeCount) {
+        this.nodeCount = nodeCount;
+    }
+
+    public int getNumberOfThreads() {
+        return numberOfThreads;
+    }
+
+    public void setNumberOfThreads(int numberOfThreads) {
+        this.numberOfThreads = numberOfThreads;
+    }
+
+    public String getQueueName() {
+        return queueName;
+    }
+
+    public void setQueueName(String queueName) {
+        this.queueName = queueName;
+    }
+
+    public int getWallTimeLimit() {
+        return wallTimeLimit;
+    }
+
+    public void setWallTimeLimit(int wallTimeLimit) {
+        this.wallTimeLimit = wallTimeLimit;
+    }
+
+    public Timestamp getJobStartTime() {
+        return jobStartTime;
+    }
+
+    public void setJobStartTime(Timestamp jobStartTime) {
+        this.jobStartTime = jobStartTime;
+    }
+
+    public int getTotalPhysicalmemory() {
+        return totalPhysicalmemory;
+    }
+
+    public void setTotalPhysicalmemory(int totalPhysicalmemory) {
+        this.totalPhysicalmemory = totalPhysicalmemory;
+    }
+
+    public String getProjectName() {
+        return projectName;
+    }
+
+    public void setProjectName(String projectName) {
+        this.projectName = projectName;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+}
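
A short query sketch against the new scheduling table (illustrative only, not part of this diff): fetch the resource-scheduling rows recorded for one experiment. The persistence-unit name "airavata_data" and the experiment id are assumed placeholders.

    import java.util.List;

    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Persistence;
    import javax.persistence.TypedQuery;

    import org.apache.airavata.persistance.registry.jpa.model.Computational_Resource_Scheduling;

    public class SchedulingLookup {
        public static void main(String[] args) {
            EntityManagerFactory emf = Persistence.createEntityManagerFactory("airavata_data"); // assumed unit name
            EntityManager em = emf.createEntityManager();
            try {
                TypedQuery<Computational_Resource_Scheduling> query = em.createQuery(
                        "SELECT s FROM Computational_Resource_Scheduling s WHERE s.expId = :expId",
                        Computational_Resource_Scheduling.class);
                query.setParameter("expId", "EXP_123"); // illustrative experiment id
                List<Computational_Resource_Scheduling> schedules = query.getResultList();
                for (Computational_Resource_Scheduling s : schedules) {
                    System.out.println(s.getResourceHostId() + ": " + s.getNodeCount()
                            + " node(s), " + s.getCpuCount() + " cpu(s), walltime " + s.getWallTimeLimit());
                }
            } finally {
                em.close();
                emf.close();
            }
        }
    }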

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/DataTransferDetail.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/DataTransferDetail.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/DataTransferDetail.java
new file mode 100644
index 0000000..496d2e5
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/DataTransferDetail.java
@@ -0,0 +1,83 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+import java.sql.Timestamp;
+
+@Entity
+@Table(name = "DATA_TRANSFER_DETAIL")
+public class DataTransferDetail {
+    @Id
+    @Column(name = "TRANSFER_ID")
+    private String transferId;
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "CREATION_TIME")
+    private Timestamp creationTime;
+    @Column(name = "TRANSFER_DESC")
+    private String transferDesc;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    public String getTransferId() {
+        return transferId;
+    }
+
+    public void setTransferId(String transferId) {
+        this.transferId = transferId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public Timestamp getCreationTime() {
+        return creationTime;
+    }
+
+    public void setCreationTime(Timestamp creationTime) {
+        this.creationTime = creationTime;
+    }
+
+    public String getTransferDesc() {
+        return transferDesc;
+    }
+
+    public void setTransferDesc(String transferDesc) {
+        this.transferDesc = transferDesc;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ErrorDetails.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ErrorDetails.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ErrorDetails.java
new file mode 100644
index 0000000..47194f3
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ErrorDetails.java
@@ -0,0 +1,178 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+import java.sql.Timestamp;
+
+@Entity
+@Table(name = "ERROR_DETAIL")
+public class ErrorDetails {
+    @Id
+    @GeneratedValue
+    @Column(name = "ERROR_ID")
+    private int errorID;
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "NODE_INSTANCE_ID")
+    private String nodeId;
+    @Column(name = "CREATION_TIME")
+    private Timestamp creationTime;
+    @Column(name = "ACTUAL_ERROR_MESSAGE")
+    private String actualErrorMsg;
+    @Column(name = "USER_FRIEDNLY_ERROR_MSG")
+    private String userFriendlyErrorMsg;
+    @Column(name = "TRANSIENT_OR_PERSISTENT")
+    private boolean transientPersistent;
+    @Column(name = "ERROR_CATEGORY")
+    private String errorCategory;
+    @Column(name = "CORRECTIVE_ACTION")
+    private String correctiveAction;
+    @Column(name = "ACTIONABLE_GROUP")
+    private String actionableGroup;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "NODE_INSTANCE_ID")
+    private WorkflowNodeDetail nodeDetails;
+
+    public int getErrorID() {
+        return errorID;
+    }
+
+    public void setErrorID(int errorID) {
+        this.errorID = errorID;
+    }
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public Timestamp getCreationTime() {
+        return creationTime;
+    }
+
+    public void setCreationTime(Timestamp creationTime) {
+        this.creationTime = creationTime;
+    }
+
+    public String getActualErrorMsg() {
+        return actualErrorMsg;
+    }
+
+    public void setActualErrorMsg(String actualErrorMsg) {
+        this.actualErrorMsg = actualErrorMsg;
+    }
+
+    public String getUserFriendlyErrorMsg() {
+        return userFriendlyErrorMsg;
+    }
+
+    public void setUserFriendlyErrorMsg(String userFriendlyErrorMsg) {
+        this.userFriendlyErrorMsg = userFriendlyErrorMsg;
+    }
+
+    public boolean isTransientPersistent() {
+        return transientPersistent;
+    }
+
+    public void setTransientPersistent(boolean transientPersistent) {
+        this.transientPersistent = transientPersistent;
+    }
+
+    public String getErrorCategory() {
+        return errorCategory;
+    }
+
+    public void setErrorCategory(String errorCategory) {
+        this.errorCategory = errorCategory;
+    }
+
+    public String getActionableGroup() {
+        return actionableGroup;
+    }
+
+    public void setActionableGroup(String actionableGroup) {
+        this.actionableGroup = actionableGroup;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+
+    public WorkflowNodeDetail getNodeDetails() {
+        return nodeDetails;
+    }
+
+    public void setNodeDetails(WorkflowNodeDetail nodeDetails) {
+        this.nodeDetails = nodeDetails;
+    }
+
+    public String getCorrectiveAction() {
+        return correctiveAction;
+    }
+
+    public void setCorrectiveAction(String correctiveAction) {
+        this.correctiveAction = correctiveAction;
+    }
+}


[6/8] new datamodels for AIRAVATA-1017

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Execution_Error.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Execution_Error.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Execution_Error.java
deleted file mode 100644
index fa34ede..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Execution_Error.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import java.sql.Timestamp;
-
-import javax.persistence.*;
-
-@Entity
-@Table(name="EXECUTION_ERROR")
-public class Execution_Error {
-    @Id @GeneratedValue
-    @Column(name = "ERROR_ID")
-    private int error_id;
-
-    @Column(name = "EXPERIMENT_ID")
-    private String experiment_ID;
-    @Column(name = "WORKFLOW_INSTANCE_ID")
-    private String workflow_instanceID;
-    @Column(name = "NODE_ID")
-    private String node_id;
-    @Column(name = "GFAC_JOB_ID")
-    private String gfacJobID;
-    @Column(name = "SOURCE_TYPE")
-    private String source_type;
-    @Column(name = "ERROR_DATE")
-    private Timestamp error_date;
-
-    @Column(name = "ERROR_REPORTER")
-    private String error_reporter;
-    @Column(name = "ERROR_LOCATION")
-    private String error_location;
-    @Column(name = "ACTION_TAKEN")
-    private String action_taken;
-    @Column(name = "ERROR_REFERENCE")
-    private int error_reference;
-
-    @ManyToOne()
-    @JoinColumn(name = "EXPERIMENT_ID")
-    private Experiment_Metadata experiment_metadata;
-
-    @ManyToOne()
-    @JoinColumn(name = "WORKFLOW_INSTANCE_ID")
-    private Workflow_Data workflow_Data;
-
-    @Lob
-    @Column(name = "ERROR_MSG")
-    private String error_msg;
-    @Lob
-    @Column(name = "ERROR_DESC")
-    private String error_des;
-    @Column(name = "ERROR_CODE")
-    private String error_code;
-
-    public String getWorkflow_instanceID() {
-        return workflow_instanceID;
-    }
-
-    public String getNode_id() {
-        return node_id;
-    }
-
-    public Workflow_Data getWorkflow_Data() {
-        return workflow_Data;
-    }
-
-    public String getError_msg() {
-        return error_msg;
-    }
-
-    public String getError_des() {
-        return error_des;
-    }
-
-    public String getError_code() {
-        return error_code;
-    }
-
-    public void setWorkflow_instanceID(String workflow_instanceID) {
-        this.workflow_instanceID = workflow_instanceID;
-    }
-
-    public void setNode_id(String node_id) {
-        this.node_id = node_id;
-    }
-
-    public void setWorkflow_Data(Workflow_Data workflow_Data) {
-        this.workflow_Data = workflow_Data;
-    }
-
-    public void setError_msg(String error_msg) {
-        this.error_msg = error_msg;
-    }
-
-    public void setError_des(String error_des) {
-        this.error_des = error_des;
-    }
-
-    public void setError_code(String error_code) {
-        this.error_code = error_code;
-    }
-
-    public int getError_id() {
-        return error_id;
-    }
-
-    public String getExperiment_ID() {
-        return experiment_ID;
-    }
-
-    public String getGfacJobID() {
-        return gfacJobID;
-    }
-
-    public String getSource_type() {
-        return source_type;
-    }
-
-    public Timestamp getError_date() {
-        return error_date;
-    }
-
-    public Experiment_Metadata getExperiment_MetaData() {
-        return experiment_metadata;
-    }
-
-    public void setError_id(int error_id) {
-        this.error_id = error_id;
-    }
-
-    public void setExperiment_ID(String experiment_ID) {
-        this.experiment_ID = experiment_ID;
-    }
-
-    public void setGfacJobID(String gfacJobID) {
-        this.gfacJobID = gfacJobID;
-    }
-
-    public void setSource_type(String source_type) {
-        this.source_type = source_type;
-    }
-
-    public void setError_date(Timestamp error_date) {
-        this.error_date = error_date;
-    }
-
-    public void setExperiment_Metadata(Experiment_Metadata experiment_metadata) {
-        this.experiment_metadata = experiment_metadata;
-    }
-
-    public String getError_reporter() {
-        return error_reporter;
-    }
-
-    public String getError_location() {
-        return error_location;
-    }
-
-    public String getAction_taken() {
-        return action_taken;
-    }
-
-    public Experiment_Metadata getExperiment_Metadata() {
-        return experiment_metadata;
-    }
-
-    public void setError_reporter(String error_reporter) {
-        this.error_reporter = error_reporter;
-    }
-
-    public void setError_location(String error_location) {
-        this.error_location = error_location;
-    }
-
-    public void setAction_taken(String action_taken) {
-        this.action_taken = action_taken;
-    }
-
-    public int getError_reference() {
-        return error_reference;
-    }
-
-    public void setError_reference(int error_reference) {
-        this.error_reference = error_reference;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment.java
index 0577a3d..c83440b 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment.java
@@ -1,91 +1,176 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//
-//package org.apache.airavata.persistance.registry.jpa.model;
-//
-//import javax.persistence.*;
-//import java.sql.Timestamp;
-//
-///**
-// * @deprecated use Experiment_Metadata instead
-// */
-//
-//@Entity
-//public class Experiment {
-//    @Id
-//    private String experiment_ID;
-//    private Timestamp submitted_date;
-//    private String user_name;
-//    private String gateway_name;
-//    private String project_name;
-//
-//    @ManyToOne(cascade= CascadeType.MERGE)
-//    @JoinColumn(name = "user_name")
-//    private Users user;
-//
-//    @ManyToOne(cascade=CascadeType.MERGE)
-//    @JoinColumn(name = "gateway_name")
-//    private Gateway gateway;
-//
-//    @ManyToOne(cascade=CascadeType.MERGE)
-//    @JoinColumn(name = "project_name")
-//    private Project project;
-//
-//    public String getExperiment_ID() {
-//        return experiment_ID;
-//    }
-//
-//    public Timestamp getSubmitted_date() {
-//        return submitted_date;
-//    }
-//
-//    public Users getUser() {
-//        return user;
-//    }
-//
-//    public Project getProject() {
-//        return project;
-//    }
-//
-//    public void setExperiment_ID(String experiment_ID) {
-//        this.experiment_ID = experiment_ID;
-//    }
-//
-//    public void setSubmitted_date(Timestamp submitted_date) {
-//        this.submitted_date = submitted_date;
-//    }
-//
-//    public void setUser(Users user) {
-//        this.user = user;
-//    }
-//
-//    public void setProject(Project project) {
-//        this.project = project;
-//    }
-//
-//    public Gateway getGateway() {
-//        return gateway;
-//    }
-//
-//    public void setGateway(Gateway gateway) {
-//        this.gateway = gateway;
-//    }
-//}
+/*
+*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*   http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied.  See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*
+*/
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+import java.sql.Timestamp;
+
+@Entity
+@Table(name = "EXPERIMENT")
+public class Experiment {
+    @Id
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Column(name = "GATEWAY_NAME")
+    private String gatewayName;
+    @Column(name = "EXECUTION_USER")
+    private String executionUser;
+    @Column(name = "PROJECT_NAME")
+    private String projectName;
+    @Column(name = "CREATION_TIME")
+    private Timestamp creationTime;
+    @Column(name = "EXPERIMENT_NAME")
+    private String expName;
+    @Column(name = "EXPERIMENT_DESCRIPTION")
+    private String expDesc;
+    @Column(name = "APPLICATION_ID")
+    private String applicationId;
+    @Column(name = "APPLICATION_VERSION")
+    private String appVersion;
+    @Column(name = "WORKFLOW_TEMPLATE_ID")
+    private String workflowTemplateId;
+    @Column(name = "WORKFLOW_TEMPLATE_VERSION")
+    private String workflowTemplateVersion;
+    @Column(name = "WORKFLOW_EXECUTION_ID")
+    private String workflowExecutionId;
+
+
+    @ManyToOne(cascade=CascadeType.MERGE)
+    @JoinColumn(name = "gateway_name")
+    private Gateway gateway;
+
+    @ManyToOne(cascade=CascadeType.MERGE)
+    @JoinColumn(name = "project_name")
+    private Project project;
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public String getGatewayName() {
+        return gatewayName;
+    }
+
+    public void setGatewayName(String gatewayName) {
+        this.gatewayName = gatewayName;
+    }
+
+    public String getExecutionUser() {
+        return executionUser;
+    }
+
+    public void setExecutionUser(String executionUser) {
+        this.executionUser = executionUser;
+    }
+
+    public String getProjectName() {
+        return projectName;
+    }
+
+    public void setProjectName(String projectName) {
+        this.projectName = projectName;
+    }
+
+    public Timestamp getCreationTime() {
+        return creationTime;
+    }
+
+    public void setCreationTime(Timestamp creationTime) {
+        this.creationTime = creationTime;
+    }
+
+    public String getExpName() {
+        return expName;
+    }
+
+    public void setExpName(String expName) {
+        this.expName = expName;
+    }
+
+    public String getExpDesc() {
+        return expDesc;
+    }
+
+    public void setExpDesc(String expDesc) {
+        this.expDesc = expDesc;
+    }
+
+    public String getApplicationId() {
+        return applicationId;
+    }
+
+    public void setApplicationId(String applicationId) {
+        this.applicationId = applicationId;
+    }
+
+    public String getAppVersion() {
+        return appVersion;
+    }
+
+    public void setAppVersion(String appVersion) {
+        this.appVersion = appVersion;
+    }
+
+    public String getWorkflowTemplateId() {
+        return workflowTemplateId;
+    }
+
+    public void setWorkflowTemplateId(String workflowTemplateId) {
+        this.workflowTemplateId = workflowTemplateId;
+    }
+
+    public String getWorkflowTemplateVersion() {
+        return workflowTemplateVersion;
+    }
+
+    public void setWorkflowTemplateVersion(String workflowTemplateVersion) {
+        this.workflowTemplateVersion = workflowTemplateVersion;
+    }
+
+    public String getWorkflowExecutionId() {
+        return workflowExecutionId;
+    }
+
+    public void setWorkflowExecutionId(String workflowExecutionId) {
+        this.workflowExecutionId = workflowExecutionId;
+    }
+
+    public Gateway getGateway() {
+        return gateway;
+    }
+
+    public void setGateway(Gateway gateway) {
+        this.gateway = gateway;
+    }
+
+    public Project getProject() {
+        return project;
+    }
+
+    public void setProject(Project project) {
+        this.project = project;
+    }
+}
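
A minimal persist sketch for the reworked Experiment entity (illustrative only, not part of this diff): create one experiment row through the new column mapping. It assumes a persistence unit named "airavata_data", that GATEWAY and PROJECT rows matching the names used already exist, and placeholder values throughout.

    import java.sql.Timestamp;

    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Persistence;

    import org.apache.airavata.persistance.registry.jpa.model.Experiment;

    public class CreateExperiment {
        public static void main(String[] args) {
            EntityManagerFactory emf = Persistence.createEntityManagerFactory("airavata_data"); // assumed unit name
            EntityManager em = emf.createEntityManager();
            try {
                em.getTransaction().begin();
                Experiment experiment = new Experiment();
                experiment.setExpId("EXP_123");                 // illustrative values below
                experiment.setGatewayName("default");
                experiment.setProjectName("default-project");
                experiment.setExecutionUser("test-user");
                experiment.setExpName("echo experiment");
                experiment.setCreationTime(new Timestamp(System.currentTimeMillis()));
                em.persist(experiment);
                em.getTransaction().commit();
            } finally {
                em.close();
                emf.close();
            }
        }
    }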

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ExperimentConfigData.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ExperimentConfigData.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ExperimentConfigData.java
new file mode 100644
index 0000000..ef7d38e
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/ExperimentConfigData.java
@@ -0,0 +1,82 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "CONFIG_DATA")
+public class ExperimentConfigData {
+    @Id
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Column(name = "AIRAVATA_AUTO_SCHEDULE")
+    private boolean airavataAutoSchedule;
+    @Column(name = "OVERRIDE_MANUAL_SCHEDULE_PARAMS")
+    private boolean overrideManualParams;
+    @Column(name = "SHARE_EXPERIMENT")
+    private boolean shareExp;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public boolean isAiravataAutoSchedule() {
+        return airavataAutoSchedule;
+    }
+
+    public void setAiravataAutoSchedule(boolean airavataAutoSchedule) {
+        this.airavataAutoSchedule = airavataAutoSchedule;
+    }
+
+    public boolean isOverrideManualParams() {
+        return overrideManualParams;
+    }
+
+    public void setOverrideManualParams(boolean overrideManualParams) {
+        this.overrideManualParams = overrideManualParams;
+    }
+
+    public boolean isShareExp() {
+        return shareExp;
+    }
+
+    public void setShareExp(boolean shareExp) {
+        this.shareExp = shareExp;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Configuration_Data.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Configuration_Data.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Configuration_Data.java
deleted file mode 100644
index 509f8f4..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Configuration_Data.java
+++ /dev/null
@@ -1,314 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import javax.persistence.*;
-import java.sql.Timestamp;
-
-@Entity
-@Table(name ="EXPERIMENT_CONFIGURATION_DATA")
-public class Experiment_Configuration_Data {
-    @Id
-    @Column(name = "EXPERIMENT_ID")
-    private String experiment_id;
-    @ManyToOne
-    @JoinColumn(name = "EXPERIMENT_ID")
-    private Experiment_Metadata experiment_metadata;
-    @Column(name = "RESOURCE_HOST_ID")
-    private String resource_host_id;
-    @Column(name = "TOTAL_CPU_COUNT")
-    private int total_cpu_count;
-    @Column(name = "NODE_COUNT")
-    private int node_count;
-    @Column(name = "NUMBER_OF_THREADS")
-    private int number_of_threads;
-    @Column(name = "QUEUE_NAME")
-    private String queue_name;
-    @Column(name = "WALLTIME_LIMIT")
-    private int walltime_limit;
-    @Column(name = "JOB_START_TIME")
-    private Timestamp job_start_time;
-    @Column(name = "TOTAL_PHYSICAL_MEMORY")
-    private int total_physical_memory;
-    @Column(name = "COMPUTATIONAL_PROJECT_ACCOUNT")
-    private String computational_project_account;
-    @Column(name = "AIRAVATA_AUTO_SCHEDULE")
-    private boolean airavata_auto_schedule;
-    @Column(name = "OVERRIDE_MANUAL_SCHEDULE_PARAMS")
-    private boolean override_manual_schedule;
-    @Column(name = "UNIQUE_WORKING_DIR")
-    private String unique_working_dir;
-    @Column(name = "STAGE_INPUT_FILES_TO_WORKING_DIR")
-    private boolean stage_input_files_to_working_dir;
-    @Column(name = "OUTPUT_DATA_DIR")
-    private String output_data_dir;
-    @Column(name = "DATA_REG_URL")
-    private String data_reg_url;
-    @Column(name = "PERSIST_OUTPUT_DATA")
-    private boolean persist_output_data;
-    @Column(name = "CLEAN_AFTER_JOB")
-    private boolean clean_after_job;
-    @Column(name = "APPLICATION_ID")
-    private String application_id;
-    @Column(name = "APPLICATION_VERSION")
-    private String application_version;
-    @Column(name = "WORKFLOW_TEMPLATE_ID")
-    private String workflow_template_id;
-    @Column(name = "WORKFLOW_TEMPLATE_VERSION")
-    private String workflow_template_version;
-    @Column(name = "WORKING_DIR_PARENT")
-    private String working_dir_parent;
-    @Column(name = "START_EXECUTION_AT")
-    private String start_execution_at;
-    @Column(name = "EXECUTE_BEFORE")
-    private String execute_before;
-    @Column(name = "NUMBER_OF_RETRIES")
-    private int number_of_retries;
-
-    @Lob
-    @Column(name = "EXPERIMENT_CONFIG_DATA")
-    private byte[] experiment_config_data;
-
-    public Experiment_Metadata getExperiment_metadata() {
-        return experiment_metadata;
-    }
-
-    public void setExperiment_metadata(Experiment_Metadata experiment_metadata) {
-        this.experiment_metadata = experiment_metadata;
-    }
-
-    public String getResource_host_id() {
-        return resource_host_id;
-    }
-
-    public void setResource_host_id(String resource_host_id) {
-        this.resource_host_id = resource_host_id;
-    }
-
-    public int getTotal_cpu_count() {
-        return total_cpu_count;
-    }
-
-    public void setTotal_cpu_count(int total_cpu_count) {
-        this.total_cpu_count = total_cpu_count;
-    }
-
-    public int getNode_count() {
-        return node_count;
-    }
-
-    public void setNode_count(int node_count) {
-        this.node_count = node_count;
-    }
-
-    public int getNumber_of_threads() {
-        return number_of_threads;
-    }
-
-    public void setNumber_of_threads(int number_of_threads) {
-        this.number_of_threads = number_of_threads;
-    }
-
-    public String getQueue_name() {
-        return queue_name;
-    }
-
-    public void setQueue_name(String queue_name) {
-        this.queue_name = queue_name;
-    }
-
-    public int getWalltime_limit() {
-        return walltime_limit;
-    }
-
-    public void setWalltime_limit(int walltime_limit) {
-        this.walltime_limit = walltime_limit;
-    }
-
-    public Timestamp getJob_start_time() {
-        return job_start_time;
-    }
-
-    public void setJob_start_time(Timestamp job_start_time) {
-        this.job_start_time = job_start_time;
-    }
-
-    public int getTotal_physical_memory() {
-        return total_physical_memory;
-    }
-
-    public void setTotal_physical_memory(int total_physical_memory) {
-        this.total_physical_memory = total_physical_memory;
-    }
-
-    public String getComputational_project_account() {
-        return computational_project_account;
-    }
-
-    public void setComputational_project_account(String computational_project_account) {
-        this.computational_project_account = computational_project_account;
-    }
-
-    public boolean isAiravata_auto_schedule() {
-        return airavata_auto_schedule;
-    }
-
-    public void setAiravata_auto_schedule(boolean airavata_auto_schedule) {
-        this.airavata_auto_schedule = airavata_auto_schedule;
-    }
-
-    public boolean isOverride_manual_schedule() {
-        return override_manual_schedule;
-    }
-
-    public void setOverride_manual_schedule(boolean override_manual_schedule) {
-        this.override_manual_schedule = override_manual_schedule;
-    }
-
-    public boolean isStage_input_files_to_working_dir() {
-        return stage_input_files_to_working_dir;
-    }
-
-    public void setStage_input_files_to_working_dir(boolean stage_input_files_to_working_dir) {
-        this.stage_input_files_to_working_dir = stage_input_files_to_working_dir;
-    }
-
-    public String getOutput_data_dir() {
-        return output_data_dir;
-    }
-
-    public void setOutput_data_dir(String output_data_dir) {
-        this.output_data_dir = output_data_dir;
-    }
-
-    public String getData_reg_url() {
-        return data_reg_url;
-    }
-
-    public void setData_reg_url(String data_reg_url) {
-        this.data_reg_url = data_reg_url;
-    }
-
-    public boolean isPersist_output_data() {
-        return persist_output_data;
-    }
-
-    public void setPersist_output_data(boolean persist_output_data) {
-        this.persist_output_data = persist_output_data;
-    }
-
-    public boolean isClean_after_job() {
-        return clean_after_job;
-    }
-
-    public void setClean_after_job(boolean clean_after_job) {
-        this.clean_after_job = clean_after_job;
-    }
-
-    public byte[] getExperiment_config_data() {
-        return experiment_config_data;
-    }
-
-    public void setExperiment_config_data(byte[] experiment_config_data) {
-        this.experiment_config_data = experiment_config_data;
-    }
-
-    public String getUnique_working_dir() {
-        return unique_working_dir;
-    }
-
-    public void setUnique_working_dir(String unique_working_dir) {
-        this.unique_working_dir = unique_working_dir;
-    }
-
-    public String getApplication_id() {
-        return application_id;
-    }
-
-    public void setApplication_id(String application_id) {
-        this.application_id = application_id;
-    }
-
-    public String getApplication_version() {
-        return application_version;
-    }
-
-    public void setApplication_version(String application_version) {
-        this.application_version = application_version;
-    }
-
-    public String getWorkflow_template_id() {
-        return workflow_template_id;
-    }
-
-    public void setWorkflow_template_id(String workflow_template_id) {
-        this.workflow_template_id = workflow_template_id;
-    }
-
-    public String getWorkflow_template_version() {
-        return workflow_template_version;
-    }
-
-    public void setWorkflow_template_version(String workflow_template_version) {
-        this.workflow_template_version = workflow_template_version;
-    }
-
-    public String getWorking_dir_parent() {
-        return working_dir_parent;
-    }
-
-    public void setWorking_dir_parent(String working_dir_parent) {
-        this.working_dir_parent = working_dir_parent;
-    }
-
-    public String getStart_execution_at() {
-        return start_execution_at;
-    }
-
-    public void setStart_execution_at(String start_execution_at) {
-        this.start_execution_at = start_execution_at;
-    }
-
-    public String getExecute_before() {
-        return execute_before;
-    }
-
-    public void setExecute_before(String execute_before) {
-        this.execute_before = execute_before;
-    }
-
-    public int getNumber_of_retries() {
-        return number_of_retries;
-    }
-
-    public void setNumber_of_retries(int number_of_retries) {
-        this.number_of_retries = number_of_retries;
-    }
-
-    public String getExperiment_id() {
-        return experiment_id;
-    }
-
-    public void setExperiment_id(String experiment_id) {
-        this.experiment_id = experiment_id;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Data.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Data.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Data.java
deleted file mode 100644
index d070fe8..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Data.java
+++ /dev/null
@@ -1,66 +0,0 @@
-///*
-// *
-// * Licensed to the Apache Software Foundation (ASF) under one
-// * or more contributor license agreements.  See the NOTICE file
-// * distributed with this work for additional information
-// * regarding copyright ownership.  The ASF licenses this file
-// * to you under the Apache License, Version 2.0 (the
-// * "License"); you may not use this file except in compliance
-// * with the License.  You may obtain a copy of the License at
-// *
-// *   http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing,
-// * software distributed under the License is distributed on an
-// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// * KIND, either express or implied.  See the License for the
-// * specific language governing permissions and limitations
-// * under the License.
-// *
-//*/
-//package org.apache.airavata.persistance.registry.jpa.model;
-//
-//import javax.persistence.Entity;
-//import javax.persistence.Id;
-//
-///**
-// * @deprecated user Experiment_Configuration_Data
-// */
-//@Entity
-//public class Experiment_Data {
-//	@Id
-//	private String experiment_ID;
-//	private String name;
-//    private String username;
-//
-//	/*@OneToMany(cascade=CascadeType.ALL, mappedBy = "Experiment_Data")
-//	private final List<Workflow_Data> workflows = new ArrayList<Workflow_Data>();*/
-//
-//	public String getExperiment_ID() {
-//		return experiment_ID;
-//	}
-//
-//	public void setExperiment_ID(String experiment_ID) {
-//		this.experiment_ID = experiment_ID;
-//	}
-//
-//	public String getName() {
-//		return name;
-//	}
-//
-//	public void setName(String name) {
-//		this.name = name;
-//	}
-//
-//    public String getUsername() {
-//        return username;
-//    }
-//
-//    public void setUsername(String username) {
-//        this.username = username;
-//    }
-//
-//    /*public List<Workflow_Data> getWorkflows() {
-//        return workflows;
-//    }*/
-//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Input.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Input.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Input.java
index cbdd6dd..0b40b46 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Input.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Input.java
@@ -32,15 +32,21 @@ public class Experiment_Input {
     private String experiment_id;
 
     @Id
-    @Column(name = "EX_KEY")
+    @Column(name = "INPUT_KEY")
     private String ex_key;
 
     @Column(name = "VALUE")
     private String value;
 
+    @Column(name = "INPUT_TYPE")
+    private String inputType;
+
+    @Column(name = "METADATA")
+    private String metadata;
+
     @ManyToOne
     @JoinColumn(name = "EXPERIMENT_ID")
-    private Experiment_Metadata experiment_metadata;
+    private Experiment experiment;
 
     public String getExperiment_id() {
         return experiment_id;
@@ -50,14 +56,6 @@ public class Experiment_Input {
         this.experiment_id = experiment_id;
     }
 
-    public Experiment_Metadata getExperiment_metadata() {
-        return experiment_metadata;
-    }
-
-    public void setExperiment_metadata(Experiment_Metadata experiment_metadata) {
-        this.experiment_metadata = experiment_metadata;
-    }
-
     public String getEx_key() {
         return ex_key;
     }
@@ -73,4 +71,28 @@ public class Experiment_Input {
     public void setValue(String value) {
         this.value = value;
     }
+
+    public String getInputType() {
+        return inputType;
+    }
+
+    public void setInputType(String inputType) {
+        this.inputType = inputType;
+    }
+
+    public String getMetadata() {
+        return metadata;
+    }
+
+    public void setMetadata(String metadata) {
+        this.metadata = metadata;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
 }
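
A small sketch of the widened Experiment_Input mapping (illustrative only, not part of this diff): record an input value using the new INPUT_TYPE and METADATA columns against an existing experiment. The persistence-unit name "airavata_data" and the ids/values are assumed placeholders; the ex_key setter comes from the unchanged portion of the class.

    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Persistence;

    import org.apache.airavata.persistance.registry.jpa.model.Experiment;
    import org.apache.airavata.persistance.registry.jpa.model.Experiment_Input;

    public class AddExperimentInput {
        public static void main(String[] args) {
            EntityManagerFactory emf = Persistence.createEntityManagerFactory("airavata_data"); // assumed unit name
            EntityManager em = emf.createEntityManager();
            try {
                em.getTransaction().begin();
                Experiment experiment = em.find(Experiment.class, "EXP_123"); // illustrative id
                Experiment_Input input = new Experiment_Input();
                input.setExperiment_id(experiment.getExpId());
                input.setEx_key("echo_input");      // stored in the renamed INPUT_KEY column
                input.setValue("Hello World");
                input.setInputType("String");       // new INPUT_TYPE column
                input.setMetadata("{}");            // new METADATA column
                input.setExperiment(experiment);
                em.persist(input);
                em.getTransaction().commit();
            } finally {
                em.close();
                emf.close();
            }
        }
    }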

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Metadata.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Metadata.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Metadata.java
deleted file mode 100644
index 0732caf..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Metadata.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import javax.persistence.*;
-import java.sql.Timestamp;
-
-@Entity
-@Table(name ="EXPERIMENT_METADATA")
-public class Experiment_Metadata {
-    @Id
-    @Column(name = "EXPERIMENT_ID")
-    private String experiment_id;
-    @Column(name = "EXPERIMENT_NAME")
-    private String experiment_name;
-    @Column(name = "DESCRIPTION")
-    private String description;
-    @Column(name = "SUBMITTED_DATE")
-    private Timestamp submitted_date;
-    @Column(name = "EXECUTION_USER" )
-    private String execution_user;
-    @Column(name = "GATEWAY_NAME")
-    private String gateway_name;
-    @Column(name = "PROJECT_NAME")
-    private String project_name;
-
-
-//    @ManyToOne(cascade= CascadeType.MERGE)
-//    @JoinColumn(name = "EXECUTION_USER", referencedColumnName = "USER_NAME" )
-//    private Users user;
-
-    @ManyToOne(cascade=CascadeType.MERGE)
-    @JoinColumn(name = "GATEWAY_NAME")
-    private Gateway gateway;
-
-    @ManyToOne(cascade=CascadeType.MERGE)
-    @JoinColumn(name = "PROJECT_NAME")
-    private Project project;
-
-    private boolean share_experiment;
-
-    public String getExecution_user() {
-        return execution_user;
-    }
-
-    public void setExecution_user(String execution_user) {
-        this.execution_user = execution_user;
-    }
-
-    public String getGateway_name() {
-        return gateway_name;
-    }
-
-    public void setGateway_name(String gateway_name) {
-        this.gateway_name = gateway_name;
-    }
-
-    public String getProject_name() {
-        return project_name;
-    }
-
-    public void setProject_name(String project_name) {
-        this.project_name = project_name;
-    }
-
-    public String getExperiment_id() {
-        return experiment_id;
-    }
-
-    public void setExperiment_id(String experiment_id) {
-        this.experiment_id = experiment_id;
-    }
-
-    public String getExperiment_name() {
-        return experiment_name;
-    }
-
-    public void setExperiment_name(String experiment_name) {
-        this.experiment_name = experiment_name;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    public void setDescription(String description) {
-        this.description = description;
-    }
-
-    public Timestamp getSubmitted_date() {
-        return submitted_date;
-    }
-
-    public void setSubmitted_date(Timestamp submitted_date) {
-        this.submitted_date = submitted_date;
-    }
-
-//    public Users getUser() {
-//        return user;
-//    }
-//
-//    public void setUser(Users user) {
-//        this.user = user;
-//    }
-
-    public Gateway getGateway() {
-        return gateway;
-    }
-
-    public void setGateway(Gateway gateway) {
-        this.gateway = gateway;
-    }
-
-    public Project getProject() {
-        return project;
-    }
-
-    public void setProject(Project project) {
-        this.project = project;
-    }
-
-    public boolean isShare_experiment() {
-        return share_experiment;
-    }
-
-    public void setShare_experiment(boolean share_experiment) {
-        this.share_experiment = share_experiment;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Output.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Output.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Output.java
index 57f1d2c..4c9e055 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Output.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Output.java
@@ -32,14 +32,18 @@ public class Experiment_Output {
     private String experiment_id;
 
     @Id
-    @Column(name = "EX_KEY")
+    @Column(name = "OUTPUT_KEY")
     private String ex_key;
     @Column(name = "VALUE")
     private String value;
+    @Column(name = "METADATA")
+    private String metadata;
+    @Column(name = "OUTPUT_KEY_TYPE")
+    private String outputKeyType;
 
     @ManyToOne
     @JoinColumn(name = "EXPERIMENT_ID")
-    private Experiment_Metadata experiment_metadata;
+    private Experiment experiment;
 
     public String getExperiment_id() {
         return experiment_id;
@@ -49,14 +53,6 @@ public class Experiment_Output {
         this.experiment_id = experiment_id;
     }
 
-    public Experiment_Metadata getExperiment_metadata() {
-        return experiment_metadata;
-    }
-
-    public void setExperiment_metadata(Experiment_Metadata experiment_metadata) {
-        this.experiment_metadata = experiment_metadata;
-    }
-
     public String getEx_key() {
         return ex_key;
     }
@@ -72,4 +68,28 @@ public class Experiment_Output {
     public void setValue(String value) {
         this.value = value;
     }
+
+    public String getMetadata() {
+        return metadata;
+    }
+
+    public void setMetadata(String metadata) {
+        this.metadata = metadata;
+    }
+
+    public String getOutputKeyType() {
+        return outputKeyType;
+    }
+
+    public void setOutputKeyType(String outputKeyType) {
+        this.outputKeyType = outputKeyType;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Summary.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Summary.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Summary.java
deleted file mode 100644
index e06cfc8..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Experiment_Summary.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.model;
-
-
-import javax.persistence.*;
-import java.sql.Timestamp;
-
-@Entity
-@Table(name ="EXPERIMENT_SUMMARY")
-public class Experiment_Summary {
-    @Id
-    @Column(name = "EXPERIMENT_ID")
-    private String experimentID;
-
-    @ManyToOne
-    @JoinColumn(name = "EXPERIMENT_ID")
-    private Experiment_Metadata experiment_metadata;
-    @Column(name = "STATUS")
-    private String status;
-    @Column(name = "LAST_UPDATED_TIME")
-    private Timestamp last_update_time;
-
-    public Experiment_Metadata getExperiment_metadata() {
-        return experiment_metadata;
-    }
-
-    public void setExperiment_metadata(Experiment_Metadata experiment_metadata) {
-        this.experiment_metadata = experiment_metadata;
-    }
-
-    public String getStatus() {
-        return status;
-    }
-
-    public void setStatus(String status) {
-        this.status = status;
-    }
-
-    public Timestamp getLast_update_time() {
-        return last_update_time;
-    }
-
-    public void setLast_update_time(Timestamp last_update_time) {
-        this.last_update_time = last_update_time;
-    }
-
-    public String getExperimentID() {
-        return experimentID;
-    }
-
-    public void setExperimentID(String experimentID) {
-        this.experimentID = experimentID;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/GFac_Job_Data.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/GFac_Job_Data.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/GFac_Job_Data.java
deleted file mode 100644
index 4e7fd5b..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/GFac_Job_Data.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import javax.persistence.*;
-import java.sql.Timestamp;
-
-@Entity
-@Table(name ="GFAC_JOB_DATA")
-public class GFac_Job_Data {
-    @Column(name = "EXPERIMENT_ID")
-    private String experiment_ID;
-    @Column(name = "WORKFLOW_INSTANCE_ID")
-    private String workflow_instanceID;
-    @Column(name = "NODE_ID")
-    private String  node_id;
-    @Column(name = "APPLICATION_DESC_ID")
-    private String application_descriptor_ID;
-    @Column(name = "HOST_DESC_ID")
-    private String host_descriptor_ID;
-    @Column(name = "SERVICE_DESC_ID")
-    private String service_descriptor_ID;
-    @Lob
-    @Column(name = "JOB_DATA")
-    private String job_data;
-    @Id
-    @Column(name = "LOCAL_JOB_ID")
-    private String local_Job_ID;
-    @Column(name = "SUBMITTED_TIME")
-    private Timestamp  submitted_time;
-    @Column(name = "STATUS_UPDATE_TIME")
-    private Timestamp  status_update_time;
-    @Column(name = "STATUS")
-    private String status;
-    @Lob
-    @Column(name = "METADATA")
-    private String metadata;
-
-    @ManyToOne()
-    @JoinColumn(name = "EXPERIMENT_ID")
-    private Experiment_Metadata experiment_metadata;
-
-    @ManyToOne()
-    @JoinColumn(name = "WORKFLOW_INSTANCE_ID")
-    private Workflow_Data workflow_Data;
-
-    public String getExperiment_ID() {
-        return experiment_ID;
-    }
-
-    public String getWorkflow_instanceID() {
-        return workflow_instanceID;
-    }
-
-    public String getNode_id() {
-        return node_id;
-    }
-
-    public String getApplication_descriptor_ID() {
-        return application_descriptor_ID;
-    }
-
-    public String getHost_descriptor_ID() {
-        return host_descriptor_ID;
-    }
-
-    public String getService_descriptor_ID() {
-        return service_descriptor_ID;
-    }
-
-    public String getJob_data() {
-        return job_data;
-    }
-
-    public String getLocal_Job_ID() {
-        return local_Job_ID;
-    }
-
-    public Timestamp getSubmitted_time() {
-        return submitted_time;
-    }
-
-    public Timestamp getStatus_update_time() {
-        return status_update_time;
-    }
-
-    public String getStatus() {
-        return status;
-    }
-
-    public String getMetadata() {
-        return metadata;
-    }
-
-    public Experiment_Metadata getExperiment_metadata() {
-        return experiment_metadata;
-    }
-
-    public Workflow_Data getWorkflow_Data() {
-        return workflow_Data;
-    }
-
-    public void setExperiment_ID(String experiment_ID) {
-        this.experiment_ID = experiment_ID;
-    }
-
-    public void setWorkflow_instanceID(String workflow_instanceID) {
-        this.workflow_instanceID = workflow_instanceID;
-    }
-
-    public void setNode_id(String node_id) {
-        this.node_id = node_id;
-    }
-
-    public void setApplication_descriptor_ID(String application_descriptor_ID) {
-        this.application_descriptor_ID = application_descriptor_ID;
-    }
-
-    public void setHost_descriptor_ID(String host_descriptor_ID) {
-        this.host_descriptor_ID = host_descriptor_ID;
-    }
-
-    public void setService_descriptor_ID(String service_descriptor_ID) {
-        this.service_descriptor_ID = service_descriptor_ID;
-    }
-
-    public void setJob_data(String job_data) {
-        this.job_data = job_data;
-    }
-
-    public void setLocal_Job_ID(String local_Job_ID) {
-        this.local_Job_ID = local_Job_ID;
-    }
-
-    public void setSubmitted_time(Timestamp submitted_time) {
-        this.submitted_time = submitted_time;
-    }
-
-    public void setStatus_update_time(Timestamp status_update_time) {
-        this.status_update_time = status_update_time;
-    }
-
-    public void setStatus(String status) {
-        this.status = status;
-    }
-
-    public void setMetadata(String metadata) {
-        this.metadata = metadata;
-    }
-
-    public void setExperiment_metadata(Experiment_Metadata experiment_metadata) {
-        this.experiment_metadata = experiment_metadata;
-    }
-
-    public void setWorkflow_Data(Workflow_Data workflow_Data) {
-        this.workflow_Data = workflow_Data;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/GFac_Job_Status.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/GFac_Job_Status.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/GFac_Job_Status.java
deleted file mode 100644
index 9d5d794..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/GFac_Job_Status.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import javax.persistence.*;
-import java.sql.Timestamp;
-
-@Entity
-@Table(name ="GFAC_JOB_STATUS")
-public class GFac_Job_Status {
-    @Column(name = "LOCAL_JOB_ID")
-    private String local_Job_ID;
-    @Column(name = "STATUS_UPDATE_TIME")
-    private Timestamp status_update_time;
-    @Column(name = "STATUS")
-    private String status;
-
-    @ManyToOne()
-    @JoinColumn(name = "LOCAL_JOB_ID")
-    private GFac_Job_Data gFac_job_data;
-
-
-    public String getLocal_Job_ID() {
-        return local_Job_ID;
-    }
-
-    public Timestamp getStatus_update_time() {
-        return status_update_time;
-    }
-
-    public String getStatus() {
-        return status;
-    }
-
-    public void setLocal_Job_ID(String local_Job_ID) {
-        this.local_Job_ID = local_Job_ID;
-    }
-
-    public void setStatus_update_time(Timestamp status_update_time) {
-        this.status_update_time = status_update_time;
-    }
-
-    public void setStatus(String status) {
-        this.status = status;
-    }
-
-    public GFac_Job_Data getgFac_job_data() {
-        return gFac_job_data;
-    }
-
-    public void setgFac_job_data(GFac_Job_Data gFac_job_data) {
-        this.gFac_job_data = gFac_job_data;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Gram_Data.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Gram_Data.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Gram_Data.java
deleted file mode 100644
index 22e4864..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Gram_Data.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import javax.persistence.*;
-
-@Entity
-@Table(name ="GRAM_DATA")
-@IdClass(Gram_DataPK.class)
-public class Gram_Data {
-
-	@ManyToOne(cascade = CascadeType.PERSIST)
-	@JoinColumn(name = "WORKFLOW_INSTANCE_ID")
-	private Workflow_Data workflow_Data;
-
-    @Id
-    @Column(name = "WORKFLOW_INSTANCE_ID")
-    private String workflow_instanceID;
-	@Id
-    @Column(name = "NODE_ID")
-	private String node_id;
-
-    @Lob
-    @Column(name = "RSL")
-	private byte[] rsl;
-    @Column(name = "INVOKED_HOST")
-    private String invoked_host;
-    @Column(name = "LOCAL_JOB_ID")
-    private String local_Job_ID;
-
-	public Workflow_Data getWorkflow_Data() {
-		return workflow_Data;
-	}
-
-	public void setWorkflow_Data(Workflow_Data workflow_Data) {
-		this.workflow_Data = workflow_Data;
-	}
-
-	public String getNode_id() {
-		return node_id;
-	}
-
-	public void setNode_id(String node_id) {
-		this.node_id = node_id;
-	}
-
-	public byte[] getRsl() {
-		return rsl;
-	}
-
-	public void setRsl(byte[] rsl) {
-		this.rsl = rsl;
-	}
-
-	public String getInvoked_host() {
-		return invoked_host;
-	}
-
-	public void setInvoked_host(String invoked_host) {
-		this.invoked_host = invoked_host;
-	}
-
-    public String getLocal_Job_ID() {
-        return local_Job_ID;
-    }
-
-    public void setLocal_Job_ID(String local_Job_ID) {
-        this.local_Job_ID = local_Job_ID;
-    }
-
-    public String getWorkflow_instanceID() {
-        return workflow_instanceID;
-    }
-
-    public void setWorkflow_instanceID(String workflow_instanceID) {
-        this.workflow_instanceID = workflow_instanceID;
-    }
-}
-

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Gram_DataPK.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Gram_DataPK.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Gram_DataPK.java
deleted file mode 100644
index 78ee3ac..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Gram_DataPK.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.model;
-
-public class Gram_DataPK {
-    private String workflow_instanceID;
-    private String node_id;
-
-    public Gram_DataPK() {
-        ;
-    }
-
-    public Gram_DataPK(String workflow_instanceID, String node_id) {
-        this.workflow_instanceID = workflow_instanceID;
-        this.node_id = node_id;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        return false;
-    }
-
-    @Override
-    public int hashCode() {
-        return 1;
-    }
-
-    public String getWorkflow_instanceID() {
-        return workflow_instanceID;
-    }
-
-    public void setWorkflow_instanceID(String workflow_instanceID) {
-        this.workflow_instanceID = workflow_instanceID;
-    }
-
-    public String getNode_id() {
-        return node_id;
-    }
-
-    public void setNode_id(String node_id) {
-        this.node_id = node_id;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/JobDetail.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/JobDetail.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/JobDetail.java
new file mode 100644
index 0000000..a04e722
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/JobDetail.java
@@ -0,0 +1,85 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+import java.sql.Timestamp;
+
+@Entity
+@Table(name = "JOB_DETAIL")
+@IdClass(JobDetails_PK.class)
+public class JobDetail {
+    @Id
+    @Column(name = "JOB_ID")
+    private String jobId;
+    @Id
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "JOB_DESCRIPTION")
+    private String jobDescription;
+    @Column(name = "CREATION_TIME")
+    private Timestamp creationTime;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(String jobId) {
+        this.jobId = jobId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getJobDescription() {
+        return jobDescription;
+    }
+
+    public void setJobDescription(String jobDescription) {
+        this.jobDescription = jobDescription;
+    }
+
+    public Timestamp getCreationTime() {
+        return creationTime;
+    }
+
+    public void setCreationTime(Timestamp creationTime) {
+        this.creationTime = creationTime;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/JobDetails_PK.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/JobDetails_PK.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/JobDetails_PK.java
new file mode 100644
index 0000000..bb1cf30
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/JobDetails_PK.java
@@ -0,0 +1,62 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+public class JobDetails_PK {
+    private String jobId;
+    private String taskId;
+
+    public JobDetails_PK(String jobId, String taskId) {
+        this.jobId = jobId;
+        this.taskId = taskId;
+    }
+
+    public JobDetails_PK() {
+        ;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        return false;
+    }
+
+    @Override
+    public int hashCode() {
+        return 1;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(String jobId) {
+        this.jobId = jobId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+}
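
A note on the new @IdClass key classes (JobDetails_PK here, and NodeInput_PK / NodeOutput_PK below): the JPA spec expects a composite-key class to provide value-based equals() and hashCode(), since the provider uses them to identify managed instances. Stubs that always return false / 1 can confuse em.find() and the persistence context for these tables. A minimal field-based sketch for JobDetails_PK, purely illustrative and not part of this patch:

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof JobDetails_PK)) return false;
        JobDetails_PK that = (JobDetails_PK) o;
        // key equality is defined by both columns of the composite key
        return jobId != null && jobId.equals(that.jobId)
                && taskId != null && taskId.equals(that.taskId);
    }

    @Override
    public int hashCode() {
        int result = (jobId != null) ? jobId.hashCode() : 0;
        return 31 * result + ((taskId != null) ? taskId.hashCode() : 0);
    }

The same shape would apply to the other *_PK classes introduced in this change.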

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeInput.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeInput.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeInput.java
new file mode 100644
index 0000000..78a6a30
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeInput.java
@@ -0,0 +1,94 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "NODE_INPUT")
+@IdClass(NodeInput_PK.class)
+public class NodeInput {
+    @Id
+    @Column(name = "NODE_INSTANCE_ID")
+    private String nodeId;
+    @Id
+    @Column(name = "INPUT_KEY")
+    private String inputKey;
+    @Column(name = "INPUT_KEY_TYPE")
+    private String inputKeyType;
+    @Column(name = "METADATA")
+    private String metadata;
+    @Column(name = "VALUE")
+    private String value;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "NODE_INSTANCE_ID")
+    private WorkflowNodeDetail nodeDetails;
+
+    public String getInputKey() {
+        return inputKey;
+    }
+
+    public void setInputKey(String inputKey) {
+        this.inputKey = inputKey;
+    }
+
+    public String getInputKeyType() {
+        return inputKeyType;
+    }
+
+    public void setInputKeyType(String inputKeyType) {
+        this.inputKeyType = inputKeyType;
+    }
+
+    public String getMetadata() {
+        return metadata;
+    }
+
+    public void setMetadata(String metadata) {
+        this.metadata = metadata;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public WorkflowNodeDetail getNodeDetails() {
+        return nodeDetails;
+    }
+
+    public void setNodeDetails(WorkflowNodeDetail nodeDetails) {
+        this.nodeDetails = nodeDetails;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeInput_PK.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeInput_PK.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeInput_PK.java
new file mode 100644
index 0000000..d7526db
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeInput_PK.java
@@ -0,0 +1,62 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+public class NodeInput_PK {
+    private String nodeId;
+    private String inputKey;
+
+    public NodeInput_PK(String nodeId, String inputKey) {
+        this.nodeId = nodeId;
+        this.inputKey = inputKey;
+    }
+
+    public NodeInput_PK() {
+        ;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        return false;
+    }
+
+    @Override
+    public int hashCode() {
+        return 1;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public String getInputKey() {
+        return inputKey;
+    }
+
+    public void setInputKey(String inputKey) {
+        this.inputKey = inputKey;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeOutput.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeOutput.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeOutput.java
new file mode 100644
index 0000000..7bf0cea
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeOutput.java
@@ -0,0 +1,94 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "NODE_OUTPUT")
+@IdClass(NodeOutput_PK.class)
+public class NodeOutput {
+    @Id
+    @Column(name = "NODE_INSTANCE_ID")
+    private String nodeId;
+    @Id
+    @Column(name = "OUTPUT_KEY")
+    private String outputKey;
+    @Column(name = "OUTPUT_KEY_TYPE")
+    private String outputKeyType;
+    @Column(name = "METADATA")
+    private String metadata;
+    @Column(name = "VALUE")
+    private String value;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "NODE_INSTANCE_ID")
+    private WorkflowNodeDetail node;
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public String getMetadata() {
+        return metadata;
+    }
+
+    public void setMetadata(String metadata) {
+        this.metadata = metadata;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public WorkflowNodeDetail getNode() {
+        return node;
+    }
+
+    public void setNode(WorkflowNodeDetail node) {
+        this.node = node;
+    }
+
+    public String getOutputKey() {
+        return outputKey;
+    }
+
+    public void setOutputKey(String outputKey) {
+        this.outputKey = outputKey;
+    }
+
+    public String getOutputKeyType() {
+        return outputKeyType;
+    }
+
+    public void setOutputKeyType(String outputKeyType) {
+        this.outputKeyType = outputKeyType;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeOutput_PK.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeOutput_PK.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeOutput_PK.java
new file mode 100644
index 0000000..0d2ae71
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/NodeOutput_PK.java
@@ -0,0 +1,62 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+public class NodeOutput_PK {
+    private String nodeId;
+    private String outputKey;
+
+    public NodeOutput_PK(String nodeId, String outputKey) {
+        this.nodeId = nodeId;
+        this.outputKey = outputKey;
+    }
+
+    public NodeOutput_PK() {
+        ;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        return false;
+    }
+
+    @Override
+    public int hashCode() {
+        return 1;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public String getOutputKey() {
+        return outputKey;
+    }
+
+    public void setOutputKey(String outputKey) {
+        this.outputKey = outputKey;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Node_Data.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Node_Data.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Node_Data.java
deleted file mode 100644
index 1ccdbac..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Node_Data.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import java.sql.Timestamp;
-
-import javax.persistence.*;
-
-@Entity
-@Table(name ="NODE_DATA")
-@IdClass(Node_DataPK.class)
-public class Node_Data {
-
-	@Id
-    @Column(name = "WORKFLOW_INSTANCE_ID")
-    private String workflow_instanceID;
-
-	@ManyToOne()
-	@JoinColumn(name = "WORKFLOW_INSTANCE_ID")
-	private Workflow_Data workflow_Data;
-
-	@Id
-    @Column(name = "NODE_ID")
-	private String node_id;
-
-    @Id
-    @Column(name = "EXECUTION_INDEX")
-    private int execution_index;
-
-    @Column(name = "NODE_TYPE")
-	private String node_type;
-	@Lob
-    @Column(name = "INPUTS")
-    private byte[] inputs;
-	@Lob
-    @Column(name = "OUTPUTS")
-    private byte[] outputs;
-    @Column(name = "STATUS")
-    private String status;
-    @Column(name = "START_TIME")
-    private Timestamp start_time;
-    @Column(name = "LAST_UPDATE_TIME")
-    private Timestamp last_update_time;
-
-	public Workflow_Data getWorkflow_Data() {
-		return workflow_Data;
-	}
-
-	public void setWorkflow_Data(Workflow_Data workflow_Data) {
-		this.workflow_Data = workflow_Data;
-	}
-
-	public String getNode_id() {
-		return node_id;
-	}
-
-	public void setNode_id(String node_id) {
-		this.node_id = node_id;
-	}
-
-	public String getNode_type() {
-		return node_type;
-	}
-
-	public void setNode_type(String node_type) {
-		this.node_type = node_type;
-	}
-
-	public byte[] getInputs() {
-		return inputs;
-	}
-
-	public void setInputs(byte[] inputs) {
-		this.inputs = inputs;
-	}
-
-	public byte[] getOutputs() {
-		return outputs;
-	}
-
-	public void setOutputs(byte[] outputs) {
-		this.outputs = outputs;
-	}
-
-	public String getStatus() {
-		return status;
-	}
-
-	public void setStatus(String status) {
-		this.status = status;
-	}
-
-	public Timestamp getStart_time() {
-		return start_time;
-	}
-
-	public void setStart_time(Timestamp start_time) {
-		this.start_time = start_time;
-	}
-
-	public Timestamp getLast_update_time() {
-		return last_update_time;
-	}
-
-	public void setLast_update_time(Timestamp last_update_time) {
-		this.last_update_time = last_update_time;
-	}
-
-    public String getWorkflow_instanceID() {
-        return workflow_instanceID;
-    }
-
-    public void setWorkflow_instanceID(String workflow_instanceID) {
-        this.workflow_instanceID = workflow_instanceID;
-    }
-
-    public int getExecution_index() {
-        return execution_index;
-    }
-
-    public void setExecution_index(int execution_index) {
-        this.execution_index = execution_index;
-    }
-}
-

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Node_DataPK.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Node_DataPK.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Node_DataPK.java
deleted file mode 100644
index ddc12ec..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Node_DataPK.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.persistance.registry.jpa.model;
-
-public class Node_DataPK {
-    private String workflow_instanceID;
-    private String node_id;
-    private int execution_index;
-
-    public Node_DataPK() {
-        ;
-    }
-
-    public Node_DataPK(String workflow_instanceID, String node_id, int execution_index) {
-        this.workflow_instanceID = workflow_instanceID;
-        this.node_id = node_id;
-        this.execution_index = execution_index;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        return false;
-    }
-
-    @Override
-    public int hashCode() {
-        return 1;
-    }
-
-    public String getWorkflow_instanceID() {
-        return workflow_instanceID;
-    }
-
-    public void setWorkflow_instanceID(String workflow_instanceID) {
-        this.workflow_instanceID = workflow_instanceID;
-    }
-
-    public String getNode_id() {
-        return node_id;
-    }
-
-    public void setNode_id(String node_id) {
-        this.node_id = node_id;
-    }
-
-    public int getExecution_index() {
-        return execution_index;
-    }
-
-    public void setExecution_index(int execution_index) {
-        this.execution_index = execution_index;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Orchestrator.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Orchestrator.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Orchestrator.java
deleted file mode 100644
index f0f07d8..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Orchestrator.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import java.sql.Timestamp;
-
-import javax.persistence.*;
-
-@Entity
-@Table(name = "ORCHESTRATOR")
-public class Orchestrator {
-	
-	@Id
-    @Column(name = "EXPERIMENT_ID")
-    private String experiment_ID;
-    @Column(name = "USERNAME")
-	private String username;
-    @Column(name = "STATUS")
-    private String status;
-    @Column(name = "STATE")
-    private String state;
-    @Column(name = "GFAC_EPR")
-    private String gfacEPR;
-    @Column(name = "APPLICATION_NAME")
-    private String applicationName;
-	@Lob
-    @Column(name = "JOBREQUEST")
-	private String jobRequest;
-    @Column(name = "SUBMITTED_TIME")
-    private Timestamp  submitted_time;
-    @Column(name = "STATUS_UPDATE_TIME")
-    private Timestamp  status_update_time;
-	
-	public String getExperiment_ID() {
-		return experiment_ID;
-	}
-	public void setExperiment_ID(String experiment_ID) {
-		this.experiment_ID = experiment_ID;
-	}
-	
-	public String getUserName() {
-		return username;
-	}
-	public void setUserName(String username) {
-		this.username = username;
-	}
-	public String getStatus() {
-		return status;
-	}
-	public void setStatus(String status) {
-		this.status = status;
-	}
-	public String getState() {
-		return state;
-	}
-	public void setState(String state) {
-		this.state = state;
-	}
-	public String getGfacEPR() {
-		return gfacEPR;
-	}
-	public void setGfacEPR(String gfacEPR) {
-		this.gfacEPR = gfacEPR;
-	}
-	public String getApplicationName() {
-		return applicationName;
-	}
-	public void setApplicationName(String applicationName) {
-		this.applicationName = applicationName;
-	}
-	public String getJobRequest() {
-		return jobRequest;
-	}
-	public void setJobRequest(String jobRequest) {
-		this.jobRequest = jobRequest;
-	}
-	public Timestamp getSubmittedTime() {
-		return submitted_time;
-	}
-	public void setSubmittedTime(Timestamp submitted_time) {
-		this.submitted_time = submitted_time;
-	}
-	public Timestamp getStatusUpdateTime() {
-		return status_update_time;
-	}
-	public void setStatusUpdateTime(Timestamp status_update_time) {
-		this.status_update_time = status_update_time;
-	}
-	
-
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/QosParam.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/QosParam.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/QosParam.java
new file mode 100644
index 0000000..9c73497
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/QosParam.java
@@ -0,0 +1,115 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+
+@Entity
+@Table(name = "QOS_PARAMS")
+public class QosParam {
+    @Id
+    @GeneratedValue
+    @Column(name = "QOS_ID")
+    private int qosId;
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "START_EXECUTION_AT")
+    private String startExecutionAt;
+    @Column(name = "EXECUTE_BEFORE")
+    private String executeBefore;
+    @Column(name = "NO_OF_RETRIES")
+    private int noOfRetries;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    public int getQosId() {
+        return qosId;
+    }
+
+    public void setQosId(int qosId) {
+        this.qosId = qosId;
+    }
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getStartExecutionAt() {
+        return startExecutionAt;
+    }
+
+    public void setStartExecutionAt(String startExecutionAt) {
+        this.startExecutionAt = startExecutionAt;
+    }
+
+    public String getExecuteBefore() {
+        return executeBefore;
+    }
+
+    public void setExecuteBefore(String executeBefore) {
+        this.executeBefore = executeBefore;
+    }
+
+    public int getNoOfRetries() {
+        return noOfRetries;
+    }
+
+    public void setNoOfRetries(int noOfRetries) {
+        this.noOfRetries = noOfRetries;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+}
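
For anyone smoke-testing the new entities, a rough usage sketch for QosParam (not part of the patch). The persistence-unit name and the literal ids are assumptions; only the entity and setter names follow the classes in this change:

    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Persistence;

    import org.apache.airavata.persistance.registry.jpa.model.QosParam;

    public class QosParamSmokeTest {
        public static void main(String[] args) {
            // "airavata_data" is an assumed persistence-unit name; check
            // persistence.xml in airavata-jpa-registry for the real one.
            EntityManagerFactory emf = Persistence.createEntityManagerFactory("airavata_data");
            EntityManager em = emf.createEntityManager();
            em.getTransaction().begin();

            QosParam qos = new QosParam();
            qos.setExpId("exp-001");                       // hypothetical experiment id
            qos.setTaskId("task-001");                     // hypothetical task id
            qos.setStartExecutionAt("2014-02-18 09:00:00");
            qos.setExecuteBefore("2014-02-19 09:00:00");
            qos.setNoOfRetries(3);
            em.persist(qos);                               // QOS_ID is generated by the provider

            em.getTransaction().commit();
            em.close();
            emf.close();
        }
    }

Relating the row to an already-loaded Experiment or TaskDetail would go through setExperiment(...) and setTask(...) on the same object.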


[2/8] new datamodels for AIRAVATA-1017

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/Utils.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/Utils.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/Utils.java
index bd85c4d..09b417f 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/Utils.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/Utils.java
@@ -269,89 +269,89 @@ public class Utils {
                     throw  new IllegalArgumentException("Object should be a Gateway Worker.");
                 }
             case EXPERIMENT_METADATA:
-                if (o instanceof  Experiment_Metadata){
-                    return createExperimentMetadata((Experiment_Metadata)o);
-                }else {
-                    logger.error("Object should be a Experiment MetaData.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Experiment MetaData.");
-                }
+//                if (o instanceof  Experiment_Metadata){
+//                    return createExperimentMetadata((Experiment_Metadata)o);
+//                }else {
+//                    logger.error("Object should be a Experiment MetaData.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Experiment MetaData.");
+//                }
             case EXPERIMENT_CONFIG_DATA:
-                if (o instanceof  Experiment_Configuration_Data){
-                    return createExperimentConfigData((Experiment_Configuration_Data)o);
-                }else {
-                    logger.error("Object should be a Experiment Config data.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Experiment Config data.");
-                }
+//                if (o instanceof  Experiment_Configuration_Data){
+//                    return createExperimentConfigData((Experiment_Configuration_Data)o);
+//                }else {
+//                    logger.error("Object should be a Experiment Config data.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Experiment Config data.");
+//                }
             case EXPERIMENT_SUMMARY:
-                if (o instanceof  Experiment_Summary){
-                    return createExperimentSummary((Experiment_Summary)o);
-                }else {
-                    logger.error("Object should be a Experiment Summary data.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Experiment Summary data.");
-                }
+//                if (o instanceof  Experiment_Summary){
+//                    return createExperimentSummary((Experiment_Summary)o);
+//                }else {
+//                    logger.error("Object should be a Experiment Summary data.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Experiment Summary data.");
+//                }
             case EXPERIMENT_INPUT:
-                if (o instanceof  Experiment_Input){
-                    return createExperimentInput((Experiment_Input)o);
-                }else {
-                    logger.error("Object should be a Experiment input data.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Experiment input data.");
-                }
+//                if (o instanceof  Experiment_Input){
+//                    return createExperimentInput((Experiment_Input)o);
+//                }else {
+//                    logger.error("Object should be a Experiment input data.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Experiment input data.");
+//                }
             case EXPERIMENT_OUTPUT:
-                if (o instanceof  Experiment_Output){
-                    return createExperimentOutput((Experiment_Output)o);
-                }else {
-                    logger.error("Object should be a Experiment output data.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Experiment output data.");
-                }
+//                if (o instanceof  Experiment_Output){
+//                    return createExperimentOutput((Experiment_Output)o);
+//                }else {
+//                    logger.error("Object should be a Experiment output data.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Experiment output data.");
+//                }
             case WORKFLOW_DATA:
-                if (o instanceof  Workflow_Data){
-                    return createWorkflowData((Workflow_Data) o);
-                }else {
-                    logger.error("Object should be a Workflow Data.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Workflow Data.");
-                }
+//                if (o instanceof  Workflow_Data){
+//                    return createWorkflowData((Workflow_Data) o);
+//                }else {
+//                    logger.error("Object should be a Workflow Data.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Workflow Data.");
+//                }
             case NODE_DATA:
-                if (o instanceof  Node_Data){
-                    return createNodeData((Node_Data) o);
-                }else {
-                    logger.error("Object should be a Node Data.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Node Data.");
-                }
+//                if (o instanceof  Node_Data){
+//                    return createNodeData((Node_Data) o);
+//                }else {
+//                    logger.error("Object should be a Node Data.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Node Data.");
+//                }
             case GRAM_DATA:
-                if (o instanceof  Gram_Data){
-                    return createGramData((Gram_Data) o);
-                }else {
-                    logger.error("Object should be a Gram Data.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Gram Data.");
-                }
+//                if (o instanceof  Gram_Data){
+//                    return createGramData((Gram_Data) o);
+//                }else {
+//                    logger.error("Object should be a Gram Data.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Gram Data.");
+//                }
             case EXECUTION_ERROR:
-                if (o instanceof Execution_Error){
-                    return createExecutionError((Execution_Error) o);
-                }else {
-                    logger.error("Object should be a Node Error type.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a Node Error.");
-                }
+//                if (o instanceof Execution_Error){
+//                    return createExecutionError((Execution_Error) o);
+//                }else {
+//                    logger.error("Object should be a Node Error type.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a Node Error.");
+//                }
             case GFAC_JOB_DATA:
-                if (o instanceof GFac_Job_Data){
-                    return createGfacJobData((GFac_Job_Data) o);
-                }else {
-                    logger.error("Object should be a GFac Job Data type.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a GFac Job Data.");
-                }
+//                if (o instanceof GFac_Job_Data){
+//                    return createGfacJobData((GFac_Job_Data) o);
+//                }else {
+//                    logger.error("Object should be a GFac Job Data type.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a GFac Job Data.");
+//                }
             case GFAC_JOB_STATUS:
-                if (o instanceof GFac_Job_Status){
-                    return createGfacJobStatus((GFac_Job_Status) o);
-                }else {
-                    logger.error("Object should be a GFac Job Status type.", new IllegalArgumentException());
-                    throw new IllegalArgumentException("Object should be a GFac Job Status.");
-                }
+//                if (o instanceof GFac_Job_Status){
+//                    return createGfacJobStatus((GFac_Job_Status) o);
+//                }else {
+//                    logger.error("Object should be a GFac Job Status type.", new IllegalArgumentException());
+//                    throw new IllegalArgumentException("Object should be a GFac Job Status.");
+//                }
             case ORCHESTRATOR:
-            	if(o instanceof Orchestrator){
-            		return createOrchestratorData((Orchestrator)o);
-            	}else{
-            		 logger.error("Object should be orchestrator data.", new IllegalArgumentException());
-                     throw new IllegalArgumentException("Object should be orchestrator data.");
-            	}
+//            	if(o instanceof Orchestrator){
+//            		return createOrchestratorData((Orchestrator)o);
+//            	}else{
+//            		 logger.error("Object should be orchestrator data.", new IllegalArgumentException());
+//                     throw new IllegalArgumentException("Object should be orchestrator data.");
+//            	}
             default:
         }
         return null;
@@ -547,206 +547,206 @@ public class Utils {
 //        return experimentDataResource;
 //    }
 
-    private static Resource createExperimentConfigData(Experiment_Configuration_Data o){
-        ExperimentConfigDataResource exConfigDataResource = new ExperimentConfigDataResource();
-        ExperimentMetadataResource experimentMetadata = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
-        exConfigDataResource.setExMetadata(experimentMetadata);
-        exConfigDataResource.setAiravataAutoSchedule(o.isAiravata_auto_schedule());
-        exConfigDataResource.setCleanAfterJob(o.isClean_after_job());
-        exConfigDataResource.setCpuCount(o.getTotal_cpu_count());
-        exConfigDataResource.setDataRegURL(o.getData_reg_url());
-        exConfigDataResource.setJobStartTime(o.getJob_start_time());
-        exConfigDataResource.setNodeCount(o.getNode_count());
-        exConfigDataResource.setNumberOfThreads(o.getNumber_of_threads());
-        exConfigDataResource.setOutputDataDir(o.getOutput_data_dir());
-        exConfigDataResource.setOverrideManualSchedule(o.isOverride_manual_schedule());
-        exConfigDataResource.setPersistOutputData(o.isPersist_output_data());
-        exConfigDataResource.setPhysicalMemory(o.getTotal_physical_memory());
-        exConfigDataResource.setProjectAccount(o.getComputational_project_account());
-        exConfigDataResource.setQueueName(o.getQueue_name());
-        exConfigDataResource.setResourceHostID(o.getResource_host_id());
-        exConfigDataResource.setStageInputsToWDir(o.isStage_input_files_to_working_dir());
-        exConfigDataResource.setWallTimeLimit(o.getWalltime_limit());
-        exConfigDataResource.setOverrideManualSchedule(o.isOverride_manual_schedule());
-        exConfigDataResource.setWorkingDir(o.getUnique_working_dir());
-        exConfigDataResource.setWorkingDirParent(o.getWorking_dir_parent());
-        exConfigDataResource.setApplicationID(o.getApplication_id());
-        exConfigDataResource.setApplicationVersion(o.getApplication_version());
-        exConfigDataResource.setWorkflowTemplateId(o.getWorkflow_template_id());
-        exConfigDataResource.setWorkflowTemplateVersion(o.getWorkflow_template_version());
-        exConfigDataResource.setStartExecutionAt(o.getStart_execution_at());
-        exConfigDataResource.setExecuteBefore(o.getExecute_before());
-        exConfigDataResource.setNumberOfRetries(o.getNumber_of_retries());
-        return exConfigDataResource;
-    }
+//    private static Resource createExperimentConfigData(Experiment_Configuration_Data o){
+//        ExperimentConfigDataResource exConfigDataResource = new ExperimentConfigDataResource();
+//        ExperimentMetadataResource experimentMetadata = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
+//        exConfigDataResource.setExMetadata(experimentMetadata);
+//        exConfigDataResource.setAiravataAutoSchedule(o.isAiravata_auto_schedule());
+//        exConfigDataResource.setCleanAfterJob(o.isClean_after_job());
+//        exConfigDataResource.setCpuCount(o.getTotal_cpu_count());
+//        exConfigDataResource.setDataRegURL(o.getData_reg_url());
+//        exConfigDataResource.setJobStartTime(o.getJob_start_time());
+//        exConfigDataResource.setNodeCount(o.getNode_count());
+//        exConfigDataResource.setNumberOfThreads(o.getNumber_of_threads());
+//        exConfigDataResource.setOutputDataDir(o.getOutput_data_dir());
+//        exConfigDataResource.setOverrideManualSchedule(o.isOverride_manual_schedule());
+//        exConfigDataResource.setPersistOutputData(o.isPersist_output_data());
+//        exConfigDataResource.setPhysicalMemory(o.getTotal_physical_memory());
+//        exConfigDataResource.setProjectAccount(o.getComputational_project_account());
+//        exConfigDataResource.setQueueName(o.getQueue_name());
+//        exConfigDataResource.setResourceHostID(o.getResource_host_id());
+//        exConfigDataResource.setStageInputsToWDir(o.isStage_input_files_to_working_dir());
+//        exConfigDataResource.setWallTimeLimit(o.getWalltime_limit());
+//        exConfigDataResource.setOverrideManualSchedule(o.isOverride_manual_schedule());
+//        exConfigDataResource.setWorkingDir(o.getUnique_working_dir());
+//        exConfigDataResource.setWorkingDirParent(o.getWorking_dir_parent());
+//        exConfigDataResource.setApplicationID(o.getApplication_id());
+//        exConfigDataResource.setApplicationVersion(o.getApplication_version());
+//        exConfigDataResource.setWorkflowTemplateId(o.getWorkflow_template_id());
+//        exConfigDataResource.setWorkflowTemplateVersion(o.getWorkflow_template_version());
+//        exConfigDataResource.setStartExecutionAt(o.getStart_execution_at());
+//        exConfigDataResource.setExecuteBefore(o.getExecute_before());
+//        exConfigDataResource.setNumberOfRetries(o.getNumber_of_retries());
+//        return exConfigDataResource;
+//    }
 
     /**
      *
      * @param o Experiment MetaData model object
      * @return Experiment MetaData resource object
      */
-    private static Resource createExperimentMetadata(Experiment_Metadata o) {
-        ExperimentMetadataResource experimentMetadataResource = new ExperimentMetadataResource();
-        experimentMetadataResource.setExpID(o.getExperiment_id());
-        experimentMetadataResource.setDescription(o.getDescription());
-        Gateway_Worker gw = new Gateway_Worker();
-        gw.setGateway(o.getGateway());
-        experimentMetadataResource.setExecutionUser(o.getExecution_user());
-        experimentMetadataResource.setShareExp(o.isShare_experiment());
-        experimentMetadataResource.setSubmittedDate(o.getSubmitted_date());
-        GatewayResource gResource = (GatewayResource)createGateway(o.getGateway());
-        experimentMetadataResource.setGateway(gResource);
-        ProjectResource pResource = (ProjectResource)createProject(o.getProject());
-        experimentMetadataResource.setProject(pResource);
-        experimentMetadataResource.setExperimentName(o.getExperiment_name());
-        return experimentMetadataResource;
-    }
+//    private static Resource createExperimentMetadata(Experiment_Metadata o) {
+//        ExperimentMetadataResource experimentMetadataResource = new ExperimentMetadataResource();
+//        experimentMetadataResource.setExpID(o.getExperiment_id());
+//        experimentMetadataResource.setDescription(o.getDescription());
+//        Gateway_Worker gw = new Gateway_Worker();
+//        gw.setGateway(o.getGateway());
+//        experimentMetadataResource.setExecutionUser(o.getExecution_user());
+//        experimentMetadataResource.setShareExp(o.isShare_experiment());
+//        experimentMetadataResource.setSubmittedDate(o.getSubmitted_date());
+//        GatewayResource gResource = (GatewayResource)createGateway(o.getGateway());
+//        experimentMetadataResource.setGateway(gResource);
+//        ProjectResource pResource = (ProjectResource)createProject(o.getProject());
+//        experimentMetadataResource.setProject(pResource);
+//        experimentMetadataResource.setExperimentName(o.getExperiment_name());
+//        return experimentMetadataResource;
+//    }
 
-    private static Resource createExperimentSummary (Experiment_Summary o){
-        ExperimentSummaryResource summaryResource = new ExperimentSummaryResource();
-        ExperimentMetadataResource metadata = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
-        summaryResource.setExperimentMetadataResource(metadata);
-        summaryResource.setLastUpdateTime(o.getLast_update_time());
-        summaryResource.setStatus(o.getStatus());
-        return summaryResource;
-    }
+//    private static Resource createExperimentSummary (Experiment_Summary o){
+//        ExperimentSummaryResource summaryResource = new ExperimentSummaryResource();
+//        ExperimentMetadataResource metadata = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
+//        summaryResource.setExperimentMetadataResource(metadata);
+//        summaryResource.setLastUpdateTime(o.getLast_update_time());
+//        summaryResource.setStatus(o.getStatus());
+//        return summaryResource;
+//    }
 
-    private static Resource createExperimentInput (Experiment_Input o){
-        ExperimentInputResource eInputResource = new ExperimentInputResource();
-        ExperimentMetadataResource metadata = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
-        eInputResource.setExperimentMetadataResource(metadata);
-        eInputResource.setExperimentKey(o.getEx_key());
-        eInputResource.setValue(o.getValue());
-        return eInputResource;
-    }
+//    private static Resource createExperimentInput (Experiment_Input o){
+//        ExperimentInputResource eInputResource = new ExperimentInputResource();
+//        ExperimentMetadataResource metadata = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
+//        eInputResource.setExperimentMetadataResource(metadata);
+//        eInputResource.setExperimentKey(o.getEx_key());
+//        eInputResource.setValue(o.getValue());
+//        return eInputResource;
+//    }
 
-    private static Resource createExperimentOutput (Experiment_Output o){
-        ExperimentOutputResource eOutputResource = new ExperimentOutputResource();
-        ExperimentMetadataResource metadata = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
-        eOutputResource.setExperimentMetadataResource(metadata);
-        eOutputResource.setExperimentKey(o.getEx_key());
-        eOutputResource.setValue(o.getValue());
-        return eOutputResource;
-    }
+//    private static Resource createExperimentOutput (Experiment_Output o){
+//        ExperimentOutputResource eOutputResource = new ExperimentOutputResource();
+//        ExperimentMetadataResource metadata = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
+//        eOutputResource.setExperimentMetadataResource(metadata);
+//        eOutputResource.setExperimentKey(o.getEx_key());
+//        eOutputResource.setValue(o.getValue());
+//        return eOutputResource;
+//    }
 
     /**
      *
      * @param o  Workflow_Data model object
      * @return  WorkflowDataResource object
      */
-    private static Resource createWorkflowData(Workflow_Data o){
-        WorkflowDataResource workflowDataResource = new WorkflowDataResource();
-        workflowDataResource.setExperimentID(o.getExperiment_data().getExperiment_id());
-        workflowDataResource.setWorkflowInstanceID(o.getWorkflow_instanceID());
-        workflowDataResource.setTemplateName(o.getTemplate_name());
-        workflowDataResource.setStatus(o.getStatus());
-        workflowDataResource.setStartTime(o.getStart_time());
-        workflowDataResource.setLastUpdatedTime(o.getLast_update_time());
-        return workflowDataResource;
-    }
+//    private static Resource createWorkflowData(Workflow_Data o){
+//        WorkflowDataResource workflowDataResource = new WorkflowDataResource();
+//        workflowDataResource.setExperimentID(o.getExperiment_data().getExperiment_id());
+//        workflowDataResource.setWorkflowInstanceID(o.getWorkflow_instanceID());
+//        workflowDataResource.setTemplateName(o.getTemplate_name());
+//        workflowDataResource.setStatus(o.getStatus());
+//        workflowDataResource.setStartTime(o.getStart_time());
+//        workflowDataResource.setLastUpdatedTime(o.getLast_update_time());
+//        return workflowDataResource;
+//    }
 
     /**
      *
      * @param o  Node_Data model object
      * @return Node Data resource
      */
-    private static Resource createNodeData (Node_Data o){
-        NodeDataResource nodeDataResource = new NodeDataResource();
-        WorkflowDataResource workflowDataResource = (WorkflowDataResource)createWorkflowData(o.getWorkflow_Data());
-        nodeDataResource.setWorkflowDataResource(workflowDataResource);
-        nodeDataResource.setNodeID(o.getNode_id());
-        nodeDataResource.setNodeType(o.getNode_type());
-        if (o.getInputs()!=null) {
-			nodeDataResource.setInputs(new String(o.getInputs()));
-		}
-		if (o.getOutputs()!=null) {
-			nodeDataResource.setOutputs(new String(o.getOutputs()));
-		}
-		nodeDataResource.setStatus(o.getStatus());
-        nodeDataResource.setStartTime(o.getStart_time());
-        nodeDataResource.setLastUpdateTime(o.getLast_update_time());
-        nodeDataResource.setExecutionIndex(o.getExecution_index());
-        return nodeDataResource;
-    }
+//    private static Resource createNodeData (Node_Data o){
+//        NodeDataResource nodeDataResource = new NodeDataResource();
+//        WorkflowDataResource workflowDataResource = (WorkflowDataResource)createWorkflowData(o.getWorkflow_Data());
+//        nodeDataResource.setWorkflowDataResource(workflowDataResource);
+//        nodeDataResource.setNodeID(o.getNode_id());
+//        nodeDataResource.setNodeType(o.getNode_type());
+//        if (o.getInputs()!=null) {
+//			nodeDataResource.setInputs(new String(o.getInputs()));
+//		}
+//		if (o.getOutputs()!=null) {
+//			nodeDataResource.setOutputs(new String(o.getOutputs()));
+//		}
+//		nodeDataResource.setStatus(o.getStatus());
+//        nodeDataResource.setStartTime(o.getStart_time());
+//        nodeDataResource.setLastUpdateTime(o.getLast_update_time());
+//        nodeDataResource.setExecutionIndex(o.getExecution_index());
+//        return nodeDataResource;
+//    }
 
     /**
      *
      * @param o GramData model object
      * @return GramData Resource object
      */
-    private static Resource createGramData (Gram_Data o){
-        GramDataResource gramDataResource = new GramDataResource();
-        WorkflowDataResource workflowDataResource = (WorkflowDataResource)createWorkflowData(o.getWorkflow_Data());
-        gramDataResource.setWorkflowDataResource(workflowDataResource);
-        gramDataResource.setNodeID(o.getNode_id());
-        gramDataResource.setRsl(new String(o.getRsl()));
-        gramDataResource.setInvokedHost(o.getInvoked_host());
-        gramDataResource.setLocalJobID(o.getLocal_Job_ID());
-        return gramDataResource;
-    }
+//    private static Resource createGramData (Gram_Data o){
+//        GramDataResource gramDataResource = new GramDataResource();
+//        WorkflowDataResource workflowDataResource = (WorkflowDataResource)createWorkflowData(o.getWorkflow_Data());
+//        gramDataResource.setWorkflowDataResource(workflowDataResource);
+//        gramDataResource.setNodeID(o.getNode_id());
+//        gramDataResource.setRsl(new String(o.getRsl()));
+//        gramDataResource.setInvokedHost(o.getInvoked_host());
+//        gramDataResource.setLocalJobID(o.getLocal_Job_ID());
+//        return gramDataResource;
+//    }
 
-    private static Resource createExecutionError(Execution_Error o){
-        ExecutionErrorResource executionErrorResource = new ExecutionErrorResource();
-        ExperimentMetadataResource experimentDataResource = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_MetaData());
-        executionErrorResource.setMetadataResource(experimentDataResource);
-        WorkflowDataResource workflowDataResource = (WorkflowDataResource)createWorkflowData(o.getWorkflow_Data());
-        executionErrorResource.setWorkflowDataResource(workflowDataResource);
-        executionErrorResource.setNodeID(o.getNode_id());
-        executionErrorResource.setErrorID(o.getError_id());
-        executionErrorResource.setGfacJobID(o.getGfacJobID());
-        executionErrorResource.setSourceType(o.getSource_type());
-        executionErrorResource.setErrorTime(o.getError_date());
-        executionErrorResource.setErrorMsg(o.getError_msg());
-        executionErrorResource.setErrorDes(o.getError_des());
-        executionErrorResource.setErrorCode(o.getError_code());
-        executionErrorResource.setErrorReporter(o.getError_reporter());
-        executionErrorResource.setErrorLocation(o.getError_location());
-        executionErrorResource.setActionTaken(o.getAction_taken());
-        executionErrorResource.setErrorReference(o.getError_reference());
-        return executionErrorResource;
-    }
+//    private static Resource createExecutionError(Execution_Error o){
+//        ExecutionErrorResource executionErrorResource = new ExecutionErrorResource();
+//        ExperimentMetadataResource experimentDataResource = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_MetaData());
+//        executionErrorResource.setMetadataResource(experimentDataResource);
+//        WorkflowDataResource workflowDataResource = (WorkflowDataResource)createWorkflowData(o.getWorkflow_Data());
+//        executionErrorResource.setWorkflowDataResource(workflowDataResource);
+//        executionErrorResource.setNodeID(o.getNode_id());
+//        executionErrorResource.setErrorID(o.getError_id());
+//        executionErrorResource.setGfacJobID(o.getGfacJobID());
+//        executionErrorResource.setSourceType(o.getSource_type());
+//        executionErrorResource.setErrorTime(o.getError_date());
+//        executionErrorResource.setErrorMsg(o.getError_msg());
+//        executionErrorResource.setErrorDes(o.getError_des());
+//        executionErrorResource.setErrorCode(o.getError_code());
+//        executionErrorResource.setErrorReporter(o.getError_reporter());
+//        executionErrorResource.setErrorLocation(o.getError_location());
+//        executionErrorResource.setActionTaken(o.getAction_taken());
+//        executionErrorResource.setErrorReference(o.getError_reference());
+//        return executionErrorResource;
+//    }
 
-    private static Resource createGfacJobData (GFac_Job_Data o){
-        GFacJobDataResource gFacJobDataResource = new GFacJobDataResource();
-        ExperimentMetadataResource experimentDataResource = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
-        WorkflowDataResource workflowDataResource = (WorkflowDataResource)createWorkflowData(o.getWorkflow_Data());
-        gFacJobDataResource.setMetadataResource(experimentDataResource);
-        gFacJobDataResource.setWorkflowDataResource(workflowDataResource);
-        gFacJobDataResource.setNodeID(o.getNode_id());
-        gFacJobDataResource.setApplicationDescID(o.getApplication_descriptor_ID());
-        gFacJobDataResource.setServiceDescID(o.getService_descriptor_ID());
-        gFacJobDataResource.setHostDescID(o.getHost_descriptor_ID());
-        gFacJobDataResource.setJobData(o.getJob_data());
-        gFacJobDataResource.setLocalJobID(o.getLocal_Job_ID());
-        gFacJobDataResource.setSubmittedTime(o.getSubmitted_time());
-        gFacJobDataResource.setStatusUpdateTime(o.getStatus_update_time());
-        gFacJobDataResource.setStatus(o.getStatus());
-        gFacJobDataResource.setMetadata(o.getMetadata());
-        return gFacJobDataResource;
-    }
+//    private static Resource createGfacJobData (GFac_Job_Data o){
+//        GFacJobDataResource gFacJobDataResource = new GFacJobDataResource();
+//        ExperimentMetadataResource experimentDataResource = (ExperimentMetadataResource)createExperimentMetadata(o.getExperiment_metadata());
+//        WorkflowDataResource workflowDataResource = (WorkflowDataResource)createWorkflowData(o.getWorkflow_Data());
+//        gFacJobDataResource.setMetadataResource(experimentDataResource);
+//        gFacJobDataResource.setWorkflowDataResource(workflowDataResource);
+//        gFacJobDataResource.setNodeID(o.getNode_id());
+//        gFacJobDataResource.setApplicationDescID(o.getApplication_descriptor_ID());
+//        gFacJobDataResource.setServiceDescID(o.getService_descriptor_ID());
+//        gFacJobDataResource.setHostDescID(o.getHost_descriptor_ID());
+//        gFacJobDataResource.setJobData(o.getJob_data());
+//        gFacJobDataResource.setLocalJobID(o.getLocal_Job_ID());
+//        gFacJobDataResource.setSubmittedTime(o.getSubmitted_time());
+//        gFacJobDataResource.setStatusUpdateTime(o.getStatus_update_time());
+//        gFacJobDataResource.setStatus(o.getStatus());
+//        gFacJobDataResource.setMetadata(o.getMetadata());
+//        return gFacJobDataResource;
+//    }
 
-    private static Resource createGfacJobStatus(GFac_Job_Status o) {
-        GFacJobStatusResource gFacJobStatusResource = new GFacJobStatusResource();
-        gFacJobStatusResource.setLocalJobID(o.getLocal_Job_ID());
-        gFacJobStatusResource.setStatus(o.getStatus());
-        gFacJobStatusResource.setStatusUpdateTime(o.getStatus_update_time());
-        GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)createGfacJobData(o.getgFac_job_data());
-        gFacJobStatusResource.setgFacJobDataResource(gFacJobDataResource);
-        return gFacJobStatusResource;
-    }
+//    private static Resource createGfacJobStatus(GFac_Job_Status o) {
+//        GFacJobStatusResource gFacJobStatusResource = new GFacJobStatusResource();
+//        gFacJobStatusResource.setLocalJobID(o.getLocal_Job_ID());
+//        gFacJobStatusResource.setStatus(o.getStatus());
+//        gFacJobStatusResource.setStatusUpdateTime(o.getStatus_update_time());
+//        GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)createGfacJobData(o.getgFac_job_data());
+//        gFacJobStatusResource.setgFacJobDataResource(gFacJobDataResource);
+//        return gFacJobStatusResource;
+//    }
 
-    private static Resource createOrchestratorData(Orchestrator data){
-    	OrchestratorDataResource dataResource = new OrchestratorDataResource();
-    	dataResource.setExperimentID(data.getExperiment_ID());
-    	dataResource.setUserName(data.getUserName());
-    	dataResource.setApplicationName(data.getApplicationName());
-    	dataResource.setStatus(data.getStatus());
-    	dataResource.setGfacEPR(data.getGfacEPR());
-    	dataResource.setJobRequest(data.getJobRequest());
-    	dataResource.setSubmittedTime(data.getSubmittedTime());
-    	dataResource.setStatusUpdateTime(data.getStatusUpdateTime());
-    	return dataResource;
-    }
+//    private static Resource createOrchestratorData(Orchestrator data){
+//    	OrchestratorDataResource dataResource = new OrchestratorDataResource();
+//    	dataResource.setExperimentID(data.getExperiment_ID());
+//    	dataResource.setUserName(data.getUserName());
+//    	dataResource.setApplicationName(data.getApplicationName());
+//    	dataResource.setStatus(data.getStatus());
+//    	dataResource.setGfacEPR(data.getGfacEPR());
+//    	dataResource.setJobRequest(data.getJobRequest());
+//    	dataResource.setSubmittedTime(data.getSubmittedTime());
+//    	dataResource.setStatusUpdateTime(data.getStatusUpdateTime());
+//    	return dataResource;
+//    }
 //    public static byte[] getByteArray(String content){
 //        byte[] contentBytes = content.getBytes();
 //        return contentBytes;

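One practical effect of this hunk is easy to miss: the case labels in Utils.getResource are kept, but every statement under them, including the return and throw statements, is now commented out, so EXPERIMENT_METADATA, WORKFLOW_DATA, GFAC_JOB_DATA and the other listed types simply fall through to the empty default branch and the method returns null. A minimal, self-contained sketch of that fall-through behaviour, using placeholder names rather than the registry's own enum and resource classes:

public class SwitchFallThroughSketch {

    enum ResourceType { EXPERIMENT_METADATA, WORKFLOW_DATA, GFAC_JOB_DATA, OTHER }

    // Mirrors the shape of getResource after this commit: with the case bodies
    // commented out, every listed label falls through to the empty default and
    // the caller receives null instead of a resource or an IllegalArgumentException.
    static Object getResource(ResourceType type, Object o) {
        switch (type) {
            case EXPERIMENT_METADATA:
                // body commented out
            case WORKFLOW_DATA:
                // body commented out
            case GFAC_JOB_DATA:
                // body commented out
            default:
        }
        return null;
    }

    public static void main(String[] args) {
        System.out.println(getResource(ResourceType.EXPERIMENT_METADATA, new Object())); // prints: null
    }
}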
http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkerResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkerResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkerResource.java
index 32e8941..5f5467f 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkerResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkerResource.java
@@ -79,10 +79,10 @@ public class WorkerResource extends AbstractResource {
 				result=userWorkflowResource;
                 break;
             case EXPERIMENT_METADATA:
-                ExperimentMetadataResource experimentResource = new ExperimentMetadataResource();
-                experimentResource.setExecutionUser(user);
-                experimentResource.setGateway(gateway);
-                result=experimentResource;
+//                ExperimentMetadataResource experimentResource = new ExperimentMetadataResource();
+//                experimentResource.setExecutionUser(user);
+//                experimentResource.setGateway(gateway);
+//                result=experimentResource;
                 break;
 			default:
                 logger.error("Unsupported resource type for worker resource.", new IllegalArgumentException());
@@ -167,28 +167,28 @@ public class WorkerResource extends AbstractResource {
                 result= Utils.getResource(ResourceType.USER_WORKFLOW, userWorkflow);
 	            break;
 			case EXPERIMENT_METADATA:
-                generator = new QueryGenerator(EXPERIMENT_METADATA);
-                generator.setParameter(ExperimentMetadataConstants.EXPERIMENT_ID, name);
-                q = generator.selectQuery(em);
-	            Experiment_Metadata experiment = (Experiment_Metadata) q.getSingleResult();
-                result= Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
-				break;
+//                generator = new QueryGenerator(EXPERIMENT_METADATA);
+//                generator.setParameter(ExperimentMetadataConstants.EXPERIMENT_ID, name);
+//                q = generator.selectQuery(em);
+//	            Experiment_Metadata experiment = (Experiment_Metadata) q.getSingleResult();
+//                result= Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
+//				break;
 			case WORKFLOW_DATA:
-                generator = new QueryGenerator(WORKFLOW_DATA);
-                generator.setParameter(WorkflowDataConstants.WORKFLOW_INSTANCE_ID, name);
-                q = generator.selectQuery(em);
-                Workflow_Data eworkflowData = (Workflow_Data)q.getSingleResult();
-                WorkflowDataResource workflowDataResource = (WorkflowDataResource)Utils.getResource(ResourceType.WORKFLOW_DATA, eworkflowData);
-                result= workflowDataResource;
-                break;
+//                generator = new QueryGenerator(WORKFLOW_DATA);
+//                generator.setParameter(WorkflowDataConstants.WORKFLOW_INSTANCE_ID, name);
+//                q = generator.selectQuery(em);
+//                Workflow_Data eworkflowData = (Workflow_Data)q.getSingleResult();
+//                WorkflowDataResource workflowDataResource = (WorkflowDataResource)Utils.getResource(ResourceType.WORKFLOW_DATA, eworkflowData);
+//                result= workflowDataResource;
+//                break;
             case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
-                q = generator.selectQuery(em);
-                GFac_Job_Data gFacJobData = (GFac_Job_Data)q.getSingleResult();
-                GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFacJobData);
-                result= gFacJobDataResource;
-                break;
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
+//                q = generator.selectQuery(em);
+//                GFac_Job_Data gFacJobData = (GFac_Job_Data)q.getSingleResult();
+//                GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFacJobData);
+//                result= gFacJobDataResource;
+//                break;
 			default:
                 logger.error("Unsupported resource type for worker resource.", new IllegalArgumentException());
                 break;
@@ -198,43 +198,43 @@ public class WorkerResource extends AbstractResource {
 		return result;
 	}
 	
-	public List<GFacJobDataResource> getGFacJobs(String serviceDescriptionId, String hostDescriptionId, String applicationDescriptionId){
-		List<GFacJobDataResource> result = new ArrayList<GFacJobDataResource>();
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        QueryGenerator generator;
-        Query q;
-        generator = new QueryGenerator(GFAC_JOB_DATA);
-        generator.setParameter(GFacJobDataConstants.SERVICE_DESC_ID, serviceDescriptionId);
-        generator.setParameter(GFacJobDataConstants.HOST_DESC_ID, hostDescriptionId);
-        generator.setParameter(GFacJobDataConstants.APP_DESC_ID, applicationDescriptionId);
-        q = generator.selectQuery(em);
-        for (Object o : q.getResultList()) {
-            GFac_Job_Data gFacJobData = (GFac_Job_Data)o;
-            result.add((GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFacJobData));
-        }
-        em.getTransaction().commit();
-        em.close();
-		return result;
-	}
-	
-	public List<GFacJobStatusResource> getGFacJobStatuses(String jobId){
-		List<GFacJobStatusResource> resourceList = new ArrayList<GFacJobStatusResource>();
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        QueryGenerator generator;
-        Query q;
-        generator = new QueryGenerator(GFAC_JOB_STATUS);
-        generator.setParameter(GFacJobStatusConstants.LOCAL_JOB_ID, jobId);
-        q = generator.selectQuery(em);
-        for (Object result : q.getResultList()) {
-            GFac_Job_Status gFacJobStatus = (GFac_Job_Status) result;
-            GFacJobStatusResource gFacJobStatusResource =
-                    (GFacJobStatusResource)Utils.getResource(ResourceType.GFAC_JOB_STATUS, gFacJobStatus);
-            resourceList.add(gFacJobStatusResource);
-        }
-        return resourceList;
-	}
+//	public List<GFacJobDataResource> getGFacJobs(String serviceDescriptionId, String hostDescriptionId, String applicationDescriptionId){
+//		List<GFacJobDataResource> result = new ArrayList<GFacJobDataResource>();
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        QueryGenerator generator;
+//        Query q;
+//        generator = new QueryGenerator(GFAC_JOB_DATA);
+//        generator.setParameter(GFacJobDataConstants.SERVICE_DESC_ID, serviceDescriptionId);
+//        generator.setParameter(GFacJobDataConstants.HOST_DESC_ID, hostDescriptionId);
+//        generator.setParameter(GFacJobDataConstants.APP_DESC_ID, applicationDescriptionId);
+//        q = generator.selectQuery(em);
+//        for (Object o : q.getResultList()) {
+//            GFac_Job_Data gFacJobData = (GFac_Job_Data)o;
+//            result.add((GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFacJobData));
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//		return result;
+//	}
+//
+//	public List<GFacJobStatusResource> getGFacJobStatuses(String jobId){
+//		List<GFacJobStatusResource> resourceList = new ArrayList<GFacJobStatusResource>();
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        QueryGenerator generator;
+//        Query q;
+//        generator = new QueryGenerator(GFAC_JOB_STATUS);
+//        generator.setParameter(GFacJobStatusConstants.LOCAL_JOB_ID, jobId);
+//        q = generator.selectQuery(em);
+//        for (Object result : q.getResultList()) {
+//            GFac_Job_Status gFacJobStatus = (GFac_Job_Status) result;
+//            GFacJobStatusResource gFacJobStatusResource =
+//                    (GFacJobStatusResource)Utils.getResource(ResourceType.GFAC_JOB_STATUS, gFacJobStatus);
+//            resourceList.add(gFacJobStatusResource);
+//        }
+//        return resourceList;
+//	}
 
     /**
      *
@@ -275,16 +275,16 @@ public class WorkerResource extends AbstractResource {
 	            }
 	            break;
 			case EXPERIMENT_METADATA:
-                generator = new QueryGenerator(EXPERIMENT_METADATA);
-                generator.setParameter(ExperimentMetadataConstants.GATEWAY_NAME, gateway.getGatewayName());
-                generator.setParameter(ExperimentMetadataConstants.EXECUTION_USER, user);
-                q = generator.selectQuery(em);
-	            for (Object o : q.getResultList()) {
-	            	Experiment_Metadata experiment = (Experiment_Metadata) o;
-	            	ExperimentMetadataResource experimentResource = (ExperimentMetadataResource)Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
-		            result.add(experimentResource);
-	            }
-	            break;
+//                generator = new QueryGenerator(EXPERIMENT_METADATA);
+//                generator.setParameter(ExperimentMetadataConstants.GATEWAY_NAME, gateway.getGatewayName());
+//                generator.setParameter(ExperimentMetadataConstants.EXECUTION_USER, user);
+//                q = generator.selectQuery(em);
+//	            for (Object o : q.getResultList()) {
+//	            	Experiment_Metadata experiment = (Experiment_Metadata) o;
+//	            	ExperimentMetadataResource experimentResource = (ExperimentMetadataResource)Utils.getResource(ResourceType.EXPERIMENT_METADATA, experiment);
+//		            result.add(experimentResource);
+//	            }
+//	            break;
 			default:
                 logger.error("Unsupported resource type for worker resource.", new IllegalArgumentException());
                 break;
@@ -476,26 +476,26 @@ public class WorkerResource extends AbstractResource {
      * @param name experiment name
      * @return experiment resource
      */
-    public ExperimentMetadataResource getExperiment(String name){
-		return (ExperimentMetadataResource)get(ResourceType.EXPERIMENT_METADATA, name);
-	}
-    
-    public GFacJobDataResource getGFacJob(String jobId){
-    	return (GFacJobDataResource)get(ResourceType.GFAC_JOB_DATA,jobId);
-    }
+//    public ExperimentMetadataResource getExperiment(String name){
+//		return (ExperimentMetadataResource)get(ResourceType.EXPERIMENT_METADATA, name);
+//	}
+//
+//    public GFacJobDataResource getGFacJob(String jobId){
+//    	return (GFacJobDataResource)get(ResourceType.GFAC_JOB_DATA,jobId);
+//    }
 
     /**
      *
      * @return list of experiments for the user
      */
-	public List<ExperimentMetadataResource> getExperiments(){
-		List<ExperimentMetadataResource> result=new ArrayList<ExperimentMetadataResource>();
-		List<Resource> list = get(ResourceType.EXPERIMENT_METADATA);
-		for (Resource resource : list) {
-			result.add((ExperimentMetadataResource) resource);
-		}
-		return result;
-	}
+//	public List<ExperimentMetadataResource> getExperiments(){
+//		List<ExperimentMetadataResource> result=new ArrayList<ExperimentMetadataResource>();
+//		List<Resource> list = get(ResourceType.EXPERIMENT_METADATA);
+//		for (Resource resource : list) {
+//			result.add((ExperimentMetadataResource) resource);
+//		}
+//		return result;
+//	}
 
     /**
      *

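The getGFacJobs and getGFacJobStatuses helpers commented out above followed the registry's usual lookup pattern: open an EntityManager, begin a transaction, build a parameterized select through QueryGenerator, and wrap each returned row in a resource object. The sketch below shows the same pattern written directly against the standard javax.persistence query API; the GFacJobRecord entity, its jobId and status fields, and the persistence-unit name are placeholders for illustration and are not classes introduced by this commit.

import java.util.List;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Id;
import javax.persistence.Persistence;
import javax.persistence.TypedQuery;

@Entity
class GFacJobRecord {                           // placeholder entity, not part of this commit
    @Id
    private String jobId;
    private String status;

    public String getJobId() { return jobId; }
    public void setJobId(String jobId) { this.jobId = jobId; }
    public String getStatus() { return status; }
    public void setStatus(String status) { this.status = status; }
}

public class GFacJobLookupSketch {
    public static List<GFacJobRecord> findByJobId(EntityManagerFactory emf, String jobId) {
        EntityManager em = emf.createEntityManager();
        try {
            em.getTransaction().begin();
            TypedQuery<GFacJobRecord> q = em.createQuery(
                    "SELECT g FROM GFacJobRecord g WHERE g.jobId = :jobId", GFacJobRecord.class);
            q.setParameter("jobId", jobId);
            List<GFacJobRecord> rows = q.getResultList();
            em.getTransaction().commit();
            return rows;
        } finally {
            em.close();
        }
    }

    public static void main(String[] args) {
        // "airavata-registry" is a placeholder persistence-unit name.
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("airavata-registry");
        List<GFacJobRecord> rows = findByJobId(emf, "job-001");   // hypothetical job id
        System.out.println(rows.size() + " matching job record(s)");
        emf.close();
    }
}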
http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkflowDataResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkflowDataResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkflowDataResource.java
index 23b9858..403a180 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkflowDataResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/WorkflowDataResource.java
@@ -97,27 +97,27 @@ public class WorkflowDataResource extends AbstractResource{
     }
 
     public Resource create(ResourceType type) {
-       switch (type){
-           case NODE_DATA:
-               NodeDataResource nodeDataResource = new NodeDataResource();
-               nodeDataResource.setWorkflowDataResource(this);
-               return nodeDataResource;
-           case GRAM_DATA:
-               GramDataResource gramDataResource = new GramDataResource();
-               gramDataResource.setWorkflowDataResource(this);
-               return gramDataResource;
-           case EXECUTION_ERROR:
-               ExecutionErrorResource executionErrorResource = new ExecutionErrorResource();
-               executionErrorResource.setWorkflowDataResource(this);
-               return executionErrorResource;
-           case GFAC_JOB_DATA:
-               GFacJobDataResource gFacJobDataResource = new GFacJobDataResource();
-               gFacJobDataResource.setWorkflowDataResource(this);
-               return gFacJobDataResource;
-           default:
+//       switch (type){
+//           case NODE_DATA:
+////               NodeDataResource nodeDataResource = new NodeDataResource();
+////               nodeDataResource.setWorkflowDataResource(this);
+////               return nodeDataResource;
+//           case GRAM_DATA:
+//               GramDataResource gramDataResource = new GramDataResource();
+//               gramDataResource.setWorkflowDataResource(this);
+//               return gramDataResource;
+//           case EXECUTION_ERROR:
+//               ExecutionErrorResource executionErrorResource = new ExecutionErrorResource();
+//               executionErrorResource.setWorkflowDataResource(this);
+//               return executionErrorResource;
+//           case GFAC_JOB_DATA:
+//               GFacJobDataResource gFacJobDataResource = new GFacJobDataResource();
+//               gFacJobDataResource.setWorkflowDataResource(this);
+//               return gFacJobDataResource;
+//           default:
                logger.error("Unsupported resource type for workflow data resource.", new IllegalArgumentException());
                throw new IllegalArgumentException("Unsupported resource type for workflow data resource.");
-       }
+//       }
     }
 
     public void remove(ResourceType type, Object name) {
@@ -161,55 +161,55 @@ public class WorkflowDataResource extends AbstractResource{
     }
 
     public Resource get(ResourceType type, Object name) {
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        QueryGenerator generator;
-        Query q;
-        switch (type) {
-            case NODE_DATA:
-                generator = new QueryGenerator(NODE_DATA);
-                generator.setParameter(NodeDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
-                generator.setParameter(NodeDataConstants.NODE_ID, name);
-                q = generator.selectQuery(em);
-                Node_Data enodeDeata = (Node_Data)q.getSingleResult();
-                NodeDataResource nodeDataResource = (NodeDataResource)Utils.getResource(ResourceType.NODE_DATA, enodeDeata);
-                em.getTransaction().commit();
-                em.close();
-                return nodeDataResource;
-            case GRAM_DATA:
-                generator = new QueryGenerator(GRAM_DATA);
-                generator.setParameter(GramDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
-                generator.setParameter(GramDataConstants.NODE_ID, name);
-                q = generator.selectQuery(em);
-                Gram_Data egramData = (Gram_Data)q.getSingleResult();
-                GramDataResource gramDataResource = (GramDataResource)Utils.getResource(ResourceType.GRAM_DATA, egramData);
-                em.getTransaction().commit();
-                em.close();
-                return gramDataResource;
-            case EXECUTION_ERROR:
-                generator = new QueryGenerator(EXECUTION_ERROR);
-                generator.setParameter(ExecutionErrorConstants.ERROR_ID, name);
-                q = generator.selectQuery(em);
-                Execution_Error execution_error = (Execution_Error)q.getSingleResult();
-                ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, execution_error);
-                em.getTransaction().commit();
-                em.close();
-                return executionErrorResource;
-            case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
-                q = generator.selectQuery(em);
-                GFac_Job_Data gFac_job_data = (GFac_Job_Data)q.getSingleResult();
-                GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFac_job_data);
-                em.getTransaction().commit();
-                em.close();
-                return gFacJobDataResource;
-            default:
-                em.getTransaction().commit();
-                em.close();
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        QueryGenerator generator;
+//        Query q;
+//        switch (type) {
+//            case NODE_DATA:
+//                generator = new QueryGenerator(NODE_DATA);
+//                generator.setParameter(NodeDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
+//                generator.setParameter(NodeDataConstants.NODE_ID, name);
+//                q = generator.selectQuery(em);
+//                Node_Data enodeDeata = (Node_Data)q.getSingleResult();
+//                NodeDataResource nodeDataResource = (NodeDataResource)Utils.getResource(ResourceType.NODE_DATA, enodeDeata);
+//                em.getTransaction().commit();
+//                em.close();
+//                return nodeDataResource;
+//            case GRAM_DATA:
+//                generator = new QueryGenerator(GRAM_DATA);
+//                generator.setParameter(GramDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
+//                generator.setParameter(GramDataConstants.NODE_ID, name);
+//                q = generator.selectQuery(em);
+//                Gram_Data egramData = (Gram_Data)q.getSingleResult();
+//                GramDataResource gramDataResource = (GramDataResource)Utils.getResource(ResourceType.GRAM_DATA, egramData);
+//                em.getTransaction().commit();
+//                em.close();
+//                return gramDataResource;
+//            case EXECUTION_ERROR:
+//                generator = new QueryGenerator(EXECUTION_ERROR);
+//                generator.setParameter(ExecutionErrorConstants.ERROR_ID, name);
+//                q = generator.selectQuery(em);
+//                Execution_Error execution_error = (Execution_Error)q.getSingleResult();
+//                ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, execution_error);
+//                em.getTransaction().commit();
+//                em.close();
+//                return executionErrorResource;
+//            case GFAC_JOB_DATA:
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.LOCAL_JOB_ID, name);
+//                q = generator.selectQuery(em);
+//                GFac_Job_Data gFac_job_data = (GFac_Job_Data)q.getSingleResult();
+//                GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFac_job_data);
+//                em.getTransaction().commit();
+//                em.close();
+//                return gFacJobDataResource;
+//            default:
+//                em.getTransaction().commit();
+//                em.close();
                 logger.error("Unsupported resource type for workflow data resource.", new IllegalArgumentException());
                 throw new IllegalArgumentException("Unsupported resource type for workflow data resource.");
-        }
+//        }
     }
 
     public List<Resource> get(ResourceType type) {
@@ -219,70 +219,70 @@ public class WorkflowDataResource extends AbstractResource{
         Query q;
         QueryGenerator generator;
         List<?> results;
-        switch (type){
-            case NODE_DATA:
-                generator = new QueryGenerator(NODE_DATA);
-                generator.setParameter(NodeDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Node_Data nodeData = (Node_Data)result;
-                        NodeDataResource nodeDataResource = (NodeDataResource)Utils.getResource(ResourceType.NODE_DATA,nodeData);
-                        resourceList.add(nodeDataResource);
-
-                    }
-                }
-                break;
-            case GRAM_DATA:
-                generator = new QueryGenerator(GRAM_DATA);
-                generator.setParameter(GramDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Gram_Data gramData = (Gram_Data)result;
-                        GramDataResource gramDataResource = (GramDataResource)Utils.getResource(ResourceType.GRAM_DATA, gramData);
-                        resourceList.add(gramDataResource);
-                    }
-                }
-                break;
-            case EXECUTION_ERROR:
-                generator = new QueryGenerator(EXECUTION_ERROR);
-                generator.setParameter(ExecutionErrorConstants.WORKFLOW_ID, workflowInstanceID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        Execution_Error executionError = (Execution_Error)result;
-                        ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, executionError);
-                        resourceList.add(executionErrorResource);
-                    }
-                }
-                break;
-            case GFAC_JOB_DATA:
-                generator = new QueryGenerator(GFAC_JOB_DATA);
-                generator.setParameter(GFacJobDataConstants.EXPERIMENT_ID, experimentID);
-                generator.setParameter(GFacJobDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
-                q = generator.selectQuery(em);
-                results = q.getResultList();
-                if (results.size() != 0) {
-                    for (Object result : results) {
-                        GFac_Job_Data gFac_job_data = (GFac_Job_Data)result;
-                        GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFac_job_data);
-                        resourceList.add(gFacJobDataResource);
-                    }
-                }
-                break;
-            default:
-                em.getTransaction().commit();
-                em.close();
+//        switch (type){
+//            case NODE_DATA:
+//                generator = new QueryGenerator(NODE_DATA);
+//                generator.setParameter(NodeDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Node_Data nodeData = (Node_Data)result;
+//                        NodeDataResource nodeDataResource = (NodeDataResource)Utils.getResource(ResourceType.NODE_DATA,nodeData);
+//                        resourceList.add(nodeDataResource);
+//
+//                    }
+//                }
+//                break;
+//            case GRAM_DATA:
+//                generator = new QueryGenerator(GRAM_DATA);
+//                generator.setParameter(GramDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Gram_Data gramData = (Gram_Data)result;
+//                        GramDataResource gramDataResource = (GramDataResource)Utils.getResource(ResourceType.GRAM_DATA, gramData);
+//                        resourceList.add(gramDataResource);
+//                    }
+//                }
+//                break;
+//            case EXECUTION_ERROR:
+//                generator = new QueryGenerator(EXECUTION_ERROR);
+//                generator.setParameter(ExecutionErrorConstants.WORKFLOW_ID, workflowInstanceID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        Execution_Error executionError = (Execution_Error)result;
+//                        ExecutionErrorResource executionErrorResource = (ExecutionErrorResource)Utils.getResource(ResourceType.EXECUTION_ERROR, executionError);
+//                        resourceList.add(executionErrorResource);
+//                    }
+//                }
+//                break;
+//            case GFAC_JOB_DATA:
+//                generator = new QueryGenerator(GFAC_JOB_DATA);
+//                generator.setParameter(GFacJobDataConstants.EXPERIMENT_ID, experimentID);
+//                generator.setParameter(GFacJobDataConstants.WORKFLOW_INSTANCE_ID, workflowInstanceID);
+//                q = generator.selectQuery(em);
+//                results = q.getResultList();
+//                if (results.size() != 0) {
+//                    for (Object result : results) {
+//                        GFac_Job_Data gFac_job_data = (GFac_Job_Data)result;
+//                        GFacJobDataResource gFacJobDataResource = (GFacJobDataResource)Utils.getResource(ResourceType.GFAC_JOB_DATA, gFac_job_data);
+//                        resourceList.add(gFacJobDataResource);
+//                    }
+//                }
+//                break;
+//            default:
+//                em.getTransaction().commit();
+//                em.close();
                 logger.error("Unsupported resource type for workflow data resource.", new IllegalArgumentException());
                 throw new IllegalArgumentException("Unsupported resource type for workflow data resource.");
-        }
-        em.getTransaction().commit();
-        em.close();
-        return resourceList;
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//        return resourceList;
     }
 
     public List<Resource> getGFacJobs(){
@@ -290,36 +290,36 @@ public class WorkflowDataResource extends AbstractResource{
     }
     
     public void save() {
-        if(lastUpdatedTime == null){
-            java.util.Date date= new java.util.Date();
-            lastUpdatedTime = new Timestamp(date.getTime());
-        }
-        EntityManager em = ResourceUtils.getEntityManager();
-        Workflow_Data existingWFData = em.find(Workflow_Data.class, workflowInstanceID);
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Workflow_Data workflowData = new Workflow_Data();
-        Experiment_Metadata expData = em.find(Experiment_Metadata.class, experimentID);
-        workflowData.setExperiment_data(expData);
-        workflowData.setWorkflow_instanceID(workflowInstanceID);
-        workflowData.setLast_update_time(lastUpdatedTime);
-        workflowData.setStart_time(startTime);
-        workflowData.setTemplate_name(templateName);
-        workflowData.setStatus(status);
-        if(existingWFData != null){
-            existingWFData.setExperiment_data(expData);
-            existingWFData.setLast_update_time(lastUpdatedTime);
-            existingWFData.setStart_time(startTime);
-            existingWFData.setStatus(status);
-            existingWFData.setTemplate_name(templateName);
-            workflowData = em.merge(existingWFData);
-        }else {
-            em.persist(workflowData);
-        }
-        em.getTransaction().commit();
-        em.close();
+//        if(lastUpdatedTime == null){
+//            java.util.Date date= new java.util.Date();
+//            lastUpdatedTime = new Timestamp(date.getTime());
+//        }
+//        EntityManager em = ResourceUtils.getEntityManager();
+////        Workflow_Data existingWFData = em.find(Workflow_Data.class, workflowInstanceID);
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+////        Workflow_Data workflowData = new Workflow_Data();
+////        Experiment_Metadata expData = em.find(Experiment_Metadata.class, experimentID);
+////        workflowData.setExperiment_data(expData);
+////        workflowData.setWorkflow_instanceID(workflowInstanceID);
+////        workflowData.setLast_update_time(lastUpdatedTime);
+////        workflowData.setStart_time(startTime);
+////        workflowData.setTemplate_name(templateName);
+////        workflowData.setStatus(status);
+//        if(existingWFData != null){
+//            existingWFData.setExperiment_data(expData);
+//            existingWFData.setLast_update_time(lastUpdatedTime);
+//            existingWFData.setStart_time(startTime);
+//            existingWFData.setStatus(status);
+//            existingWFData.setTemplate_name(templateName);
+//            workflowData = em.merge(existingWFData);
+//        }else {
+//            em.persist(workflowData);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
     }
     
     public boolean isNodeExists(String nodeId){
@@ -330,33 +330,33 @@ public class WorkflowDataResource extends AbstractResource{
     	return isExists(ResourceType.GRAM_DATA, nodeId);
     }
     
-    public NodeDataResource getNodeData(String nodeId){
-		return (NodeDataResource) get(ResourceType.NODE_DATA,nodeId);
-    }
-    
-    public GramDataResource getGramData(String nodeId){
-    	return (GramDataResource) get(ResourceType.GRAM_DATA,nodeId);
-    }
-    
-    public List<NodeDataResource> getNodeData(){
-    	return getResourceList(get(ResourceType.NODE_DATA),NodeDataResource.class);
-    }
-    
-    public List<GramDataResource> getGramData(){
-    	return getResourceList(get(ResourceType.GRAM_DATA),GramDataResource.class);
-    }
-
-    public NodeDataResource createNodeData(String nodeId){
-    	NodeDataResource data=(NodeDataResource)create(ResourceType.NODE_DATA);
-    	data.setNodeID(nodeId);
-    	return data;
-    }
-    
-    public GramDataResource createGramData(String nodeId){
-    	GramDataResource data=(GramDataResource)create(ResourceType.GRAM_DATA);
-    	data.setNodeID(nodeId);
-    	return data;
-    }
+//    public NodeDataResource getNodeData(String nodeId){
+//		return (NodeDataResource) get(ResourceType.NODE_DATA,nodeId);
+//    }
+//
+//    public GramDataResource getGramData(String nodeId){
+//    	return (GramDataResource) get(ResourceType.GRAM_DATA,nodeId);
+//    }
+//
+//    public List<NodeDataResource> getNodeData(){
+//    	return getResourceList(get(ResourceType.NODE_DATA),NodeDataResource.class);
+//    }
+//
+//    public List<GramDataResource> getGramData(){
+//    	return getResourceList(get(ResourceType.GRAM_DATA),GramDataResource.class);
+//    }
+//
+//    public NodeDataResource createNodeData(String nodeId){
+//    	NodeDataResource data=(NodeDataResource)create(ResourceType.NODE_DATA);
+//    	data.setNodeID(nodeId);
+//    	return data;
+//    }
+//
+//    public GramDataResource createGramData(String nodeId){
+//    	GramDataResource data=(GramDataResource)create(ResourceType.GRAM_DATA);
+//    	data.setNodeID(nodeId);
+//    	return data;
+//    }
     
     public void removeNodeData(String nodeId){
     	remove(ResourceType.NODE_DATA, nodeId);

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java
index 27da417..d5c425b 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java
@@ -1,114 +1,114 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.persistance.registry.jpa.utils;
-
-import org.apache.airavata.model.experiment.*;
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.resources.ExperimentConfigDataResource;
-import org.apache.airavata.persistance.registry.jpa.resources.ExperimentInputResource;
-import org.apache.airavata.persistance.registry.jpa.resources.ExperimentMetadataResource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class ThriftDataModelConversion {
-    private final static Logger logger = LoggerFactory.getLogger(ThriftDataModelConversion.class);
-
-    public static BasicMetadata getBasicMetadata (ExperimentMetadataResource exmetadata){
-        BasicMetadata bsmd = new BasicMetadata();
-        bsmd.setUserName(exmetadata.getExecutionUser());
-        bsmd.setShareExperimentPublicly(exmetadata.isShareExp());
-        bsmd.setExperimentDescription(exmetadata.getDescription());
-        bsmd.setExperimentName(exmetadata.getExperimentName());
-        return bsmd;
-    }
-
-    public static ConfigurationData getConfigurationData (ExperimentConfigDataResource excd){
-        ConfigurationData configData = new ConfigurationData();
-        configData.setBasicMetadata(getBasicMetadata(excd.getExMetadata()));
-        configData.setApplicationId(excd.getApplicationID());
-        configData.setApplicationVersion(excd.getApplicationVersion());
-        configData.setWorkflowTemplateId(excd.getWorkflowTemplateId());
-        configData.setWorklfowTemplateVersion(excd.getWorkflowTemplateVersion());
-        configData.setExperimentInputs(getExperimentInputs(excd.getExMetadata()));
-        configData.setAdvanceInputDataHandling(getAdvanceInputDataHandling(excd));
-        configData.setAdvanceOutputDataHandling(getAdvanceOutputDataHandling(excd));
-        configData.setComputationalResourceScheduling(getComputationalResourceScheduling(excd));
-        configData.setQosParams(getQOSParams(excd));
-        return configData;
-    }
-
-    public static Map<String, String> getExperimentInputs (ExperimentMetadataResource exmdr){
-        List<Resource> resources = exmdr.get(ResourceType.EXPERIMENT_INPUT);
-        Map<String, String> exInputs = new HashMap<String, String>();
-        for (Resource resource : resources){
-            ExperimentInputResource exInput = (ExperimentInputResource)resource;
-            exInputs.put(exInput.getExperimentKey(), exInput.getValue());
-        }
-        return exInputs;
-    }
-
-    public static ComputationalResourceScheduling getComputationalResourceScheduling (ExperimentConfigDataResource excdr){
-        ComputationalResourceScheduling scheduling = new ComputationalResourceScheduling();
-        scheduling.setAiravataAutoSchedule(excdr.isAiravataAutoSchedule());
-        scheduling.setOverrideManualScheduledParams(excdr.isOverrideManualSchedule());
-        scheduling.setResourceHostId(excdr.getResourceHostID());
-        scheduling.setTotalCPUCount(excdr.getCpuCount());
-        scheduling.setNodeCount(excdr.getNodeCount());
-        scheduling.setNumberOfThreads(excdr.getNumberOfThreads());
-        scheduling.setQueueName(excdr.getQueueName());
-        scheduling.setWallTimeLimit(excdr.getWallTimeLimit());
-        scheduling.setJobStartTime((int)excdr.getJobStartTime().getTime());
-        scheduling.setTotalPhysicalMemory(excdr.getPhysicalMemory());
-        scheduling.setComputationalProjectAccount(excdr.getProjectAccount());
-        return scheduling;
-    }
-
-    public static AdvancedInputDataHandling getAdvanceInputDataHandling(ExperimentConfigDataResource excd){
-        AdvancedInputDataHandling adih = new AdvancedInputDataHandling();
-        adih.setStageInputFilesToWorkingDir(excd.isStageInputsToWDir());
-        adih.setWorkingDirectoryParent(excd.getWorkingDirParent());
-        adih.setUniqueWorkingDirectory(excd.getWorkingDir());
-        adih.setCleanUpWorkingDirAfterJob(excd.isCleanAfterJob());
-        return adih;
-    }
-
-    public static AdvancedOutputDataHandling getAdvanceOutputDataHandling(ExperimentConfigDataResource excd){
-        AdvancedOutputDataHandling outputDataHandling = new AdvancedOutputDataHandling();
-        outputDataHandling.setOutputdataDir(excd.getOutputDataDir());
-        outputDataHandling.setDataRegistryURL(excd.getDataRegURL());
-        outputDataHandling.setPersistOutputData(excd.isPersistOutputData());
-        return outputDataHandling;
-    }
-
-    public static QualityOfServiceParams getQOSParams (ExperimentConfigDataResource excd){
-        QualityOfServiceParams qosParams = new QualityOfServiceParams();
-        qosParams.setStartExecutionAt(excd.getStartExecutionAt());
-        qosParams.setExecuteBefore(excd.getExecuteBefore());
-        qosParams.setNumberofRetries(excd.getNumberOfRetries());
-        return qosParams;
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.persistance.registry.jpa.utils;
+//
+//import org.apache.airavata.model.experiment.*;
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.resources.ExperimentConfigDataResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.ExperimentInputResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.ExperimentMetadataResource;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.util.HashMap;
+//import java.util.List;
+//import java.util.Map;
+//
+//public class ThriftDataModelConversion {
+//    private final static Logger logger = LoggerFactory.getLogger(ThriftDataModelConversion.class);
+//
+//    public static BasicMetadata getBasicMetadata (ExperimentMetadataResource exmetadata){
+//        BasicMetadata bsmd = new BasicMetadata();
+//        bsmd.setUserName(exmetadata.getExecutionUser());
+//        bsmd.setShareExperimentPublicly(exmetadata.isShareExp());
+//        bsmd.setExperimentDescription(exmetadata.getDescription());
+//        bsmd.setExperimentName(exmetadata.getExperimentName());
+//        return bsmd;
+//    }
+//
+//    public static ConfigurationData getConfigurationData (ExperimentConfigDataResource excd){
+//        ConfigurationData configData = new ConfigurationData();
+//        configData.setBasicMetadata(getBasicMetadata(excd.getExMetadata()));
+//        configData.setApplicationId(excd.getApplicationID());
+//        configData.setApplicationVersion(excd.getApplicationVersion());
+//        configData.setWorkflowTemplateId(excd.getWorkflowTemplateId());
+//        configData.setWorklfowTemplateVersion(excd.getWorkflowTemplateVersion());
+//        configData.setExperimentInputs(getExperimentInputs(excd.getExMetadata()));
+//        configData.setAdvanceInputDataHandling(getAdvanceInputDataHandling(excd));
+//        configData.setAdvanceOutputDataHandling(getAdvanceOutputDataHandling(excd));
+//        configData.setComputationalResourceScheduling(getComputationalResourceScheduling(excd));
+//        configData.setQosParams(getQOSParams(excd));
+//        return configData;
+//    }
+//
+//    public static Map<String, String> getExperimentInputs (ExperimentMetadataResource exmdr){
+//        List<Resource> resources = exmdr.get(ResourceType.EXPERIMENT_INPUT);
+//        Map<String, String> exInputs = new HashMap<String, String>();
+//        for (Resource resource : resources){
+//            ExperimentInputResource exInput = (ExperimentInputResource)resource;
+//            exInputs.put(exInput.getExperimentKey(), exInput.getValue());
+//        }
+//        return exInputs;
+//    }
+//
+//    public static ComputationalResourceScheduling getComputationalResourceScheduling (ExperimentConfigDataResource excdr){
+//        ComputationalResourceScheduling scheduling = new ComputationalResourceScheduling();
+//        scheduling.setAiravataAutoSchedule(excdr.isAiravataAutoSchedule());
+//        scheduling.setOverrideManualScheduledParams(excdr.isOverrideManualSchedule());
+//        scheduling.setResourceHostId(excdr.getResourceHostID());
+//        scheduling.setTotalCPUCount(excdr.getCpuCount());
+//        scheduling.setNodeCount(excdr.getNodeCount());
+//        scheduling.setNumberOfThreads(excdr.getNumberOfThreads());
+//        scheduling.setQueueName(excdr.getQueueName());
+//        scheduling.setWallTimeLimit(excdr.getWallTimeLimit());
+//        scheduling.setJobStartTime((int)excdr.getJobStartTime().getTime());
+//        scheduling.setTotalPhysicalMemory(excdr.getPhysicalMemory());
+//        scheduling.setComputationalProjectAccount(excdr.getProjectAccount());
+//        return scheduling;
+//    }
+//
+//    public static AdvancedInputDataHandling getAdvanceInputDataHandling(ExperimentConfigDataResource excd){
+//        AdvancedInputDataHandling adih = new AdvancedInputDataHandling();
+//        adih.setStageInputFilesToWorkingDir(excd.isStageInputsToWDir());
+//        adih.setWorkingDirectoryParent(excd.getWorkingDirParent());
+//        adih.setUniqueWorkingDirectory(excd.getWorkingDir());
+//        adih.setCleanUpWorkingDirAfterJob(excd.isCleanAfterJob());
+//        return adih;
+//    }
+//
+//    public static AdvancedOutputDataHandling getAdvanceOutputDataHandling(ExperimentConfigDataResource excd){
+//        AdvancedOutputDataHandling outputDataHandling = new AdvancedOutputDataHandling();
+//        outputDataHandling.setOutputdataDir(excd.getOutputDataDir());
+//        outputDataHandling.setDataRegistryURL(excd.getDataRegURL());
+//        outputDataHandling.setPersistOutputData(excd.isPersistOutputData());
+//        return outputDataHandling;
+//    }
+//
+//    public static QualityOfServiceParams getQOSParams (ExperimentConfigDataResource excd){
+//        QualityOfServiceParams qosParams = new QualityOfServiceParams();
+//        qosParams.setStartExecutionAt(excd.getStartExecutionAt());
+//        qosParams.setExecuteBefore(excd.getExecuteBefore());
+//        qosParams.setNumberofRetries(excd.getNumberOfRetries());
+//        return qosParams;
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/resources/META-INF/persistence.xml b/modules/registry/airavata-jpa-registry/src/main/resources/META-INF/persistence.xml
index c9cc9e0..10fbee4 100644
--- a/modules/registry/airavata-jpa-registry/src/main/resources/META-INF/persistence.xml
+++ b/modules/registry/airavata-jpa-registry/src/main/resources/META-INF/persistence.xml
@@ -33,18 +33,24 @@
         <class>org.apache.airavata.persistance.registry.jpa.model.Host_Descriptor</class>
         <class>org.apache.airavata.persistance.registry.jpa.model.Service_Descriptor</class>
         <class>org.apache.airavata.persistance.registry.jpa.model.Application_Descriptor</class>
-        <class>org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata</class>
-        <class>org.apache.airavata.persistance.registry.jpa.model.Experiment_Configuration_Data</class>
-        <class>org.apache.airavata.persistance.registry.jpa.model.Experiment_Summary</class>
+        <class>org.apache.airavata.persistance.registry.jpa.model.Experiment</class>
         <class>org.apache.airavata.persistance.registry.jpa.model.Experiment_Input</class>
         <class>org.apache.airavata.persistance.registry.jpa.model.Experiment_Output</class>
-		<class>org.apache.airavata.persistance.registry.jpa.model.Workflow_Data</class>
-		<class>org.apache.airavata.persistance.registry.jpa.model.Node_Data</class>
-		<class>org.apache.airavata.persistance.registry.jpa.model.Gram_Data</class>
-		<class>org.apache.airavata.persistance.registry.jpa.model.Execution_Error</class>
-		<class>org.apache.airavata.persistance.registry.jpa.model.GFac_Job_Data</class>
-		<class>org.apache.airavata.persistance.registry.jpa.model.GFac_Job_Status</class>
-		<class>org.apache.airavata.persistance.registry.jpa.model.Orchestrator</class>
+        <class>org.apache.airavata.persistance.registry.jpa.model.WorkflowNodeDetail</class>
+        <class>org.apache.airavata.persistance.registry.jpa.model.TaskDetail</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.ErrorDetails</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.ApplicationInput</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.ApplicationOutput</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.NodeInput</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.NodeOutput</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.JobDetail</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.DataTransferDetail</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.Status</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.ExperimentConfigData</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.Computational_Resource_Scheduling</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.AdvancedInputDataHandling</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.AdvancedOutputDataHandling</class>
+		<class>org.apache.airavata.persistance.registry.jpa.model.QosParam</class>
         <exclude-unlisted-classes>true</exclude-unlisted-classes>
         <!--properties>
             <property name="openjpa.ConnectionURL"

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/resources/registry-derby.sql
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/resources/registry-derby.sql b/modules/registry/airavata-jpa-registry/src/main/resources/registry-derby.sql
index 37d746f..df9fc95 100644
--- a/modules/registry/airavata-jpa-registry/src/main/resources/registry-derby.sql
+++ b/modules/registry/airavata-jpa-registry/src/main/resources/registry-derby.sql
@@ -165,51 +165,6 @@ CREATE TABLE EXPERIMENT_OUTPUTS
         FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE
 );
 
-CREATE TABLE COMPUTATIONAL_RESOURCE_SCHEDULING
-(
-        RESOURCE_SCHEDULING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
-        EXPERIMENT_ID VARCHAR(255),
-        TASK_ID VARCHAR(255),
-        RESOURCE_HOST_ID VARCHAR(255),
-        CPU_COUNT INTEGER,
-        NODE_COUNT INTEGER,
-        NO_OF_THREADS INTEGER,
-        QUEUE_NAME VARCHAR(255),
-        WALLTIME_LIMIT INTEGER,
-        JOB_START_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
-        TOTAL_PHYSICAL_MEMORY INTEGER,
-        COMPUTATIONAL_PROJECT_ACCOUNT VARCHAR(255),
-        PRIMARY KEY(RESOURCE_SCHEDULING_ID),
-        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
-        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
-);
-
-CREATE TABLE ADVANCE_INPUT_DATA_HANDLING
-(
-       INPUT_DATA_HANDLING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
-       EXPERIMENT_ID VARCHAR(255),
-       TASK_ID VARCHAR(255),
-       WORKING_DIR_PARENT VARCHAR(255),
-       UNIQUE_WORKING_DIR VARCHAR(255),
-       STAGE_INPUT_FILES_TO_WORKING_DIR SMALLINT,
-       CLEAN_AFTER_JOB SMALLINT,
-       PRIMARY KEY(INPUT_DATA_HANDLING_ID),
-       FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
-       FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
-);
-
-CREATE TABLE ADVANCE_OUTPUT_DATA_HANDLING
-(
-       OUTPUT_DATA_HANDLING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
-       EXPERIMENT_ID VARCHAR(255),
-       TASK_ID VARCHAR(255),
-       OUTPUT_DATA_DIR VARCHAR(255),
-       DATA_REG_URL VARCHAR (255),
-       PERSIST_OUTPUT_DATA SMALLINT,
-       PRIMARY KEY(OUTPUT_DATA_HANDLING_ID),
-       FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
-       FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
-);
 
 CREATE TABLE WORKFLOW_NODE_DETAIL
 (
@@ -330,6 +285,7 @@ CREATE TABLE STATUS
         FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
         FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE,
         FOREIGN KEY (NODE_INSTANCE_ID) REFERENCES WORKFLOW_NODE_DETAIL(NODE_INSTANCE_ID) ON DELETE CASCADE,
+        FOREIGN KEY (TRANSFER_ID) REFERENCES DATA_TRANSFER_DETAIL(TRANSFER_ID) ON DELETE CASCADE
 );
 
 CREATE TABLE CONFIG_DATA
@@ -341,6 +297,52 @@ CREATE TABLE CONFIG_DATA
         PRIMARY KEY(EXPERIMENT_ID)
 );
 
+CREATE TABLE COMPUTATIONAL_RESOURCE_SCHEDULING
+(
+        RESOURCE_SCHEDULING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+        EXPERIMENT_ID VARCHAR(255),
+        TASK_ID VARCHAR(255),
+        RESOURCE_HOST_ID VARCHAR(255),
+        CPU_COUNT INTEGER,
+        NODE_COUNT INTEGER,
+        NO_OF_THREADS INTEGER,
+        QUEUE_NAME VARCHAR(255),
+        WALLTIME_LIMIT INTEGER,
+        JOB_START_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',
+        TOTAL_PHYSICAL_MEMORY INTEGER,
+        COMPUTATIONAL_PROJECT_ACCOUNT VARCHAR(255),
+        PRIMARY KEY(RESOURCE_SCHEDULING_ID),
+        FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+        FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE ADVANCE_INPUT_DATA_HANDLING
+(
+       INPUT_DATA_HANDLING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+       EXPERIMENT_ID VARCHAR(255),
+       TASK_ID VARCHAR(255),
+       WORKING_DIR_PARENT VARCHAR(255),
+       UNIQUE_WORKING_DIR VARCHAR(255),
+       STAGE_INPUT_FILES_TO_WORKING_DIR SMALLINT,
+       CLEAN_AFTER_JOB SMALLINT,
+       PRIMARY KEY(INPUT_DATA_HANDLING_ID),
+       FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+       FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
+CREATE TABLE ADVANCE_OUTPUT_DATA_HANDLING
+(
+       OUTPUT_DATA_HANDLING_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,
+       EXPERIMENT_ID VARCHAR(255),
+       TASK_ID VARCHAR(255),
+       OUTPUT_DATA_DIR VARCHAR(255),
+       DATA_REG_URL VARCHAR (255),
+       PERSIST_OUTPUT_DATA SMALLINT,
+       PRIMARY KEY(OUTPUT_DATA_HANDLING_ID),
+       FOREIGN KEY (EXPERIMENT_ID) REFERENCES EXPERIMENT(EXPERIMENT_ID) ON DELETE CASCADE,
+       FOREIGN KEY (TASK_ID) REFERENCES TASK_DETAIL(TASK_ID) ON DELETE CASCADE
+);
+
 CREATE TABLE QOS_PARAMS
 (
         QOS_ID INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY,


[8/8] git commit: new datamodels for AIRAVATA-1017

Posted by ch...@apache.org.
new datamodels for AIRAVATA-1017


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/a0c1cbde
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/a0c1cbde
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/a0c1cbde

Branch: refs/heads/master
Commit: a0c1cbde996ad25a3530a6d322cab918261ccda6
Parents: 0cd438c
Author: chathuri <ch...@apache.org>
Authored: Mon Feb 17 14:29:38 2014 -0500
Committer: chathuri <ch...@apache.org>
Committed: Mon Feb 17 14:29:38 2014 -0500

----------------------------------------------------------------------
 .../persistance/registry/jpa/ResourceUtils.java |  40 +-
 .../registry/jpa/impl/AiravataJPARegistry.java  | 787 ++++++++-------
 .../registry/jpa/impl/ExperimentRegistry.java   | 605 ++++++------
 .../jpa/model/AdvancedInputDataHandling.java    | 125 +++
 .../jpa/model/AdvancedOutputDataHandling.java   | 115 +++
 .../registry/jpa/model/ApplicationInput.java    |  94 ++
 .../registry/jpa/model/ApplicationInput_PK.java |  63 ++
 .../registry/jpa/model/ApplicationOutput.java   |  94 ++
 .../jpa/model/ApplicationOutput_PK.java         |  62 ++
 .../Computational_Resource_Scheduling.java      | 176 ++++
 .../registry/jpa/model/DataTransferDetail.java  |  83 ++
 .../registry/jpa/model/ErrorDetails.java        | 178 ++++
 .../registry/jpa/model/Execution_Error.java     | 205 ----
 .../registry/jpa/model/Experiment.java          | 267 +++--
 .../jpa/model/ExperimentConfigData.java         |  82 ++
 .../model/Experiment_Configuration_Data.java    | 314 ------
 .../registry/jpa/model/Experiment_Data.java     |  66 --
 .../registry/jpa/model/Experiment_Input.java    |  42 +-
 .../registry/jpa/model/Experiment_Metadata.java | 149 ---
 .../registry/jpa/model/Experiment_Output.java   |  40 +-
 .../registry/jpa/model/Experiment_Summary.java  |  74 --
 .../registry/jpa/model/GFac_Job_Data.java       | 177 ----
 .../registry/jpa/model/GFac_Job_Status.java     |  73 --
 .../registry/jpa/model/Gram_Data.java           |  97 --
 .../registry/jpa/model/Gram_DataPK.java         |  62 --
 .../registry/jpa/model/JobDetail.java           |  85 ++
 .../registry/jpa/model/JobDetails_PK.java       |  62 ++
 .../registry/jpa/model/NodeInput.java           |  94 ++
 .../registry/jpa/model/NodeInput_PK.java        |  62 ++
 .../registry/jpa/model/NodeOutput.java          |  94 ++
 .../registry/jpa/model/NodeOutput_PK.java       |  62 ++
 .../registry/jpa/model/Node_Data.java           | 143 ---
 .../registry/jpa/model/Node_DataPK.java         |  72 --
 .../registry/jpa/model/Orchestrator.java        | 109 ---
 .../registry/jpa/model/QosParam.java            | 115 +++
 .../persistance/registry/jpa/model/Status.java  | 170 ++++
 .../registry/jpa/model/TaskDetail.java          |  93 ++
 .../registry/jpa/model/WorkflowNodeDetail.java  |  83 ++
 .../registry/jpa/model/Workflow_Data.java       |  95 --
 .../jpa/resources/ExecutionErrorResource.java   | 434 ++++-----
 .../resources/ExperimentConfigDataResource.java | 778 +++++++--------
 .../jpa/resources/ExperimentInputResource.java  | 226 ++---
 .../resources/ExperimentMetadataResource.java   | 964 +++++++++----------
 .../jpa/resources/ExperimentOutputResource.java | 226 ++---
 .../resources/ExperimentSummaryResource.java    | 224 ++---
 .../jpa/resources/GFacJobDataResource.java      | 514 +++++-----
 .../jpa/resources/GFacJobStatusResource.java    | 226 ++---
 .../registry/jpa/resources/GatewayResource.java | 142 +--
 .../jpa/resources/GramDataResource.java         | 266 ++---
 .../jpa/resources/NodeDataResource.java         | 548 +++++------
 .../jpa/resources/OrchestratorDataResource.java | 378 ++++----
 .../registry/jpa/resources/ProjectResource.java | 116 +--
 .../registry/jpa/resources/Utils.java           | 482 +++++-----
 .../registry/jpa/resources/WorkerResource.java  | 172 ++--
 .../jpa/resources/WorkflowDataResource.java     | 370 +++----
 .../jpa/utils/ThriftDataModelConversion.java    | 228 ++---
 .../src/main/resources/META-INF/persistence.xml |  26 +-
 .../src/main/resources/registry-derby.sql       |  92 +-
 .../src/main/resources/registry-mysql.sql       |   4 +-
 59 files changed, 6142 insertions(+), 5683 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/ResourceUtils.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/ResourceUtils.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/ResourceUtils.java
index fe94062..97ed481 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/ResourceUtils.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/ResourceUtils.java
@@ -311,26 +311,26 @@ public class ResourceUtils {
         }
     }
 
-    public static List<Resource> getOrchestratorDataWithStatus(String status) {
-        List<Resource> resourceList = new ArrayList<Resource>();
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        QueryGenerator generator = new QueryGenerator(AbstractResource.ORCHESTRATOR);
-        generator.setParameter(AbstractResource.OrchestratorDataConstants.STATUS, status);
-        Query q = generator.selectQuery(em);
-        List<?> results = q.getResultList();
-        if (results.size() != 0) {
-            for (Object result : results) {
-                Orchestrator orchestratorData = (Orchestrator) result;
-                OrchestratorDataResource orchestratorDataResource = (OrchestratorDataResource)
-                        Utils.getResource(ResourceType.ORCHESTRATOR, orchestratorData);
-                resourceList.add(orchestratorDataResource);
-            }
-        }
-        em.getTransaction().commit();
-        em.close();
-        return resourceList;
-    }
+//    public static List<Resource> getOrchestratorDataWithStatus(String status) {
+//        List<Resource> resourceList = new ArrayList<Resource>();
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        QueryGenerator generator = new QueryGenerator(AbstractResource.ORCHESTRATOR);
+//        generator.setParameter(AbstractResource.OrchestratorDataConstants.STATUS, status);
+//        Query q = generator.selectQuery(em);
+//        List<?> results = q.getResultList();
+//        if (results.size() != 0) {
+//            for (Object result : results) {
+//                Orchestrator orchestratorData = (Orchestrator) result;
+//                OrchestratorDataResource orchestratorDataResource = (OrchestratorDataResource)
+//                        Utils.getResource(ResourceType.ORCHESTRATOR, orchestratorData);
+//                resourceList.add(orchestratorDataResource);
+//            }
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//        return resourceList;
+//    }
 
     public static Lock getLock() {
         return lock;

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/AiravataJPARegistry.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/AiravataJPARegistry.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/AiravataJPARegistry.java
index 9ec0c43..411de5a 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/AiravataJPARegistry.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/AiravataJPARegistry.java
@@ -20,18 +20,6 @@
 */
 package org.apache.airavata.persistance.registry.jpa.impl;
 
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Pattern;
-
 import org.apache.airavata.common.exception.AiravataConfigurationException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.AiravataJobState;
@@ -51,83 +39,37 @@ import org.apache.airavata.persistance.registry.jpa.JPAResourceAccessor;
 import org.apache.airavata.persistance.registry.jpa.Resource;
 import org.apache.airavata.persistance.registry.jpa.ResourceType;
 import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.resources.ApplicationDescriptorResource;
-import org.apache.airavata.persistance.registry.jpa.resources.ConfigurationResource;
-import org.apache.airavata.persistance.registry.jpa.resources.ExecutionErrorResource;
-//import org.apache.airavata.persistance.registry.jpa.resources.ExperimentDataResource;
-import org.apache.airavata.persistance.registry.jpa.resources.ExperimentDataRetriever;
-import org.apache.airavata.persistance.registry.jpa.resources.ExperimentMetadataResource;
-//import org.apache.airavata.persistance.registry.jpa.resources.ExperimentResource;
-import org.apache.airavata.persistance.registry.jpa.resources.GFacJobDataResource;
-import org.apache.airavata.persistance.registry.jpa.resources.GFacJobStatusResource;
-import org.apache.airavata.persistance.registry.jpa.resources.GatewayResource;
-import org.apache.airavata.persistance.registry.jpa.resources.HostDescriptorResource;
-import org.apache.airavata.persistance.registry.jpa.resources.NodeDataResource;
-import org.apache.airavata.persistance.registry.jpa.resources.OrchestratorDataResource;
-import org.apache.airavata.persistance.registry.jpa.resources.ProjectResource;
-import org.apache.airavata.persistance.registry.jpa.resources.PublishWorkflowResource;
-import org.apache.airavata.persistance.registry.jpa.resources.ServiceDescriptorResource;
-import org.apache.airavata.persistance.registry.jpa.resources.UserResource;
-import org.apache.airavata.persistance.registry.jpa.resources.UserWorkflowResource;
-import org.apache.airavata.persistance.registry.jpa.resources.WorkerResource;
-import org.apache.airavata.persistance.registry.jpa.resources.WorkflowDataResource;
+import org.apache.airavata.persistance.registry.jpa.resources.*;
 import org.apache.airavata.registry.api.*;
 import org.apache.airavata.registry.api.ExecutionErrors.Source;
-import org.apache.airavata.registry.api.exception.AiravataRegistryUninitializedException;
-import org.apache.airavata.registry.api.exception.GatewayNotRegisteredException;
-import org.apache.airavata.registry.api.exception.RegistryAPIVersionIncompatibleException;
-import org.apache.airavata.registry.api.exception.RegistryAccessorInstantiateException;
-import org.apache.airavata.registry.api.exception.RegistryAccessorNotFoundException;
-import org.apache.airavata.registry.api.exception.RegistryAccessorUndefinedException;
-import org.apache.airavata.registry.api.exception.RegistryException;
-import org.apache.airavata.registry.api.exception.RegistrySettingsException;
-import org.apache.airavata.registry.api.exception.UnimplementedRegistryOperationException;
-import org.apache.airavata.registry.api.exception.gateway.DescriptorAlreadyExistsException;
-import org.apache.airavata.registry.api.exception.gateway.DescriptorDoesNotExistsException;
-import org.apache.airavata.registry.api.exception.gateway.InsufficientDataException;
-import org.apache.airavata.registry.api.exception.gateway.MalformedDescriptorException;
-import org.apache.airavata.registry.api.exception.gateway.PublishedWorkflowAlreadyExistsException;
-import org.apache.airavata.registry.api.exception.gateway.PublishedWorkflowDoesNotExistsException;
-import org.apache.airavata.registry.api.exception.worker.ExperimentDoesNotExistsException;
-import org.apache.airavata.registry.api.exception.worker.ExperimentLazyLoadedException;
-import org.apache.airavata.registry.api.exception.worker.ApplicationJobAlreadyExistsException;
-import org.apache.airavata.registry.api.exception.worker.ApplicationJobDoesNotExistsException;
-import org.apache.airavata.registry.api.exception.worker.InvalidApplicationJobIDException;
-import org.apache.airavata.registry.api.exception.worker.UserWorkflowAlreadyExistsException;
-import org.apache.airavata.registry.api.exception.worker.UserWorkflowDoesNotExistsException;
-import org.apache.airavata.registry.api.exception.worker.WorkflowInstanceAlreadyExistsException;
-import org.apache.airavata.registry.api.exception.worker.WorkflowInstanceDoesNotExistsException;
-import org.apache.airavata.registry.api.exception.worker.WorkflowInstanceNodeAlreadyExistsException;
-import org.apache.airavata.registry.api.exception.worker.WorkflowInstanceNodeDoesNotExistsException;
-import org.apache.airavata.registry.api.exception.worker.WorkspaceProjectAlreadyExistsException;
-import org.apache.airavata.registry.api.exception.worker.WorkspaceProjectDoesNotExistsException;
+import org.apache.airavata.registry.api.exception.*;
+import org.apache.airavata.registry.api.exception.gateway.*;
+import org.apache.airavata.registry.api.exception.worker.*;
 import org.apache.airavata.registry.api.impl.WorkflowExecutionDataImpl;
 import org.apache.airavata.registry.api.util.RegistryConstants;
 import org.apache.airavata.registry.api.util.RegistrySettings;
-import org.apache.airavata.registry.api.workflow.ApplicationJobStatusData;
-import org.apache.airavata.registry.api.workflow.ExecutionError;
-import org.apache.airavata.registry.api.workflow.ExperimentData;
-import org.apache.airavata.registry.api.workflow.ExperimentExecutionError;
-import org.apache.airavata.registry.api.workflow.ApplicationJob;
+import org.apache.airavata.registry.api.workflow.*;
 import org.apache.airavata.registry.api.workflow.ApplicationJob.ApplicationJobStatus;
-import org.apache.airavata.registry.api.workflow.ApplicationJobExecutionError;
-import org.apache.airavata.registry.api.workflow.NodeExecutionData;
-import org.apache.airavata.registry.api.workflow.NodeExecutionError;
-import org.apache.airavata.registry.api.workflow.NodeExecutionStatus;
-import org.apache.airavata.registry.api.workflow.WorkflowExecution;
-import org.apache.airavata.registry.api.workflow.WorkflowExecutionData;
-import org.apache.airavata.registry.api.workflow.WorkflowExecutionError;
-import org.apache.airavata.registry.api.workflow.WorkflowExecutionStatus;
 import org.apache.airavata.registry.api.workflow.WorkflowExecutionStatus.State;
-import org.apache.airavata.registry.api.workflow.WorkflowIOData;
-import org.apache.airavata.registry.api.workflow.WorkflowInstanceNode;
-import org.apache.airavata.registry.api.workflow.WorkflowNodeGramData;
-import org.apache.airavata.registry.api.workflow.WorkflowNodeIOData;
-import org.apache.airavata.registry.api.workflow.WorkflowNodeType;
 import org.apache.xmlbeans.XmlException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.sql.Timestamp;
+import java.util.*;
+import java.util.regex.Pattern;
+
+//import org.apache.airavata.persistance.registry.jpa.resources.ExecutionErrorResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.ExperimentDataResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.ExperimentMetadataResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.ExperimentResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.GFacJobDataResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.GFacJobStatusResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.NodeDataResource;
+//import org.apache.airavata.persistance.registry.jpa.resources.OrchestratorDataResource;
+
 public class AiravataJPARegistry extends AiravataRegistry2{
     private final static Logger logger = LoggerFactory.getLogger(AiravataJPARegistry.class);
     private static Map<String, String[]> compatibleVersionMap;
@@ -986,21 +928,21 @@ public class AiravataJPARegistry extends AiravataRegistry2{
     	if (projectsRegistry != null){
             projectsRegistry.addExperiment(projectName, experiment);
         }else {
-            WorkspaceProject workspaceProject = getWorkspaceProject(projectName);
-            ProjectResource project = jpa.getWorker().getProject(createProjName(workspaceProject.getProjectName()));
-            String experimentId = experiment.getExperimentId();
-            if (isExperimentExists(experimentId)){
-                throw new ExperimentDoesNotExistsException(experimentId);
-            }
-            ExperimentMetadataResource experimentResource = project.createExperiment(experimentId);
-            experimentResource.setExperimentName(experimentId);
-            experimentResource.setGateway(jpa.getGateway());
-            experimentResource.setProject(project);
-            experimentResource.setExecutionUser(jpa.getWorker().getUser());
-            if (experiment.getSubmittedDate()!=null) {
-                experimentResource.setSubmittedDate(new Timestamp(experiment.getSubmittedDate().getTime()));
-            }
-            experimentResource.save();
+//            WorkspaceProject workspaceProject = getWorkspaceProject(projectName);
+//            ProjectResource project = jpa.getWorker().getProject(createProjName(workspaceProject.getProjectName()));
+//            String experimentId = experiment.getExperimentId();
+//            if (isExperimentExists(experimentId)){
+//                throw new ExperimentDoesNotExistsException(experimentId);
+//            }
+//            ExperimentMetadataResource experimentResource = project.createExperiment(experimentId);
+//            experimentResource.setExperimentName(experimentId);
+//            experimentResource.setGateway(jpa.getGateway());
+//            experimentResource.setProject(project);
+//            experimentResource.setExecutionUser(jpa.getWorker().getUser());
+//            if (experiment.getSubmittedDate()!=null) {
+//                experimentResource.setSubmittedDate(new Timestamp(experiment.getSubmittedDate().getTime()));
+//            }
+//            experimentResource.save();
         }
     }
 
@@ -1022,37 +964,39 @@ public class AiravataJPARegistry extends AiravataRegistry2{
         }
         WorkerResource worker = jpa.getWorker();
     	List<AiravataExperiment> result=new ArrayList<AiravataExperiment>();
-    	List<ExperimentMetadataResource> experiments = worker.getExperiments();
-    	for (ExperimentMetadataResource resource : experiments) {
-			AiravataExperiment e = createAiravataExperimentObj(resource);
-			result.add(e);
-		}
-        return result;
+//    	List<ExperimentMetadataResource> experiments = worker.getExperiments();
+//    	for (ExperimentMetadataResource resource : experiments) {
+//			AiravataExperiment e = createAiravataExperimentObj(resource);
+//			result.add(e);
+//		}
+//        return result;
+        return null;
     }
 
-	private AiravataExperiment createAiravataExperimentObj(
-			ExperimentMetadataResource resource) {
-		AiravataExperiment e = new AiravataExperiment();
-		e.setExperimentId(resource.getExpID());
-		e.setUser(new AiravataUser(resource.getExecutionUser()));
-		e.setSubmittedDate(new Date(resource.getSubmittedDate().getTime()));
-		e.setGateway(new Gateway(resource.getGateway().getGatewayName()));
-		e.setProject(new WorkspaceProject(getProjName(resource.getProject().getName()), this));
-		return e;
-	}
+//	private AiravataExperiment createAiravataExperimentObj(
+//			ExperimentMetadataResource resource) {
+//		AiravataExperiment e = new AiravataExperiment();
+//		e.setExperimentId(resource.getExpID());
+//		e.setUser(new AiravataUser(resource.getExecutionUser()));
+//		e.setSubmittedDate(new Date(resource.getSubmittedDate().getTime()));
+//		e.setGateway(new Gateway(resource.getGateway().getGatewayName()));
+//		e.setProject(new WorkspaceProject(getProjName(resource.getProject().getName()), this));
+//		return e;
+//	}
 
     public List<AiravataExperiment> getExperiments(String projectName)throws RegistryException {
         if (projectsRegistry != null){
             return projectsRegistry.getExperiments(projectName);
         }
         ProjectResource project = jpa.getWorker().getProject(createProjName(projectName));
-    	List<ExperimentMetadataResource> experiments = project.getExperiments();
-    	List<AiravataExperiment> result=new ArrayList<AiravataExperiment>();
-    	for (ExperimentMetadataResource resource : experiments) {
-			AiravataExperiment e = createAiravataExperimentObj(resource);
-			result.add(e);
-		}
-        return result;
+//    	List<ExperimentMetadataResource> experiments = project.getExperiments();
+//    	List<AiravataExperiment> result=new ArrayList<AiravataExperiment>();
+//    	for (ExperimentMetadataResource resource : experiments) {
+//			AiravataExperiment e = createAiravataExperimentObj(resource);
+//			result.add(e);
+//		}
+//        return result;
+        return null;
     }
 
     public List<AiravataExperiment> getExperiments(Date from, Date to)throws RegistryException {
@@ -1317,9 +1261,9 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             if (!isExperimentExists(experimentId, true)){
                 throw new ExperimentDoesNotExistsException(experimentId);
             }
-            ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
-            experiment.setExecutionUser(user);
-            experiment.save();
+//            ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
+//            experiment.setExecutionUser(user);
+//            experiment.save();
         }
 	}
 
@@ -1333,8 +1277,9 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 		if (!isExperimentExists(experimentId)){
 			throw new ExperimentDoesNotExistsException(experimentId);
 		}
-		ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
-		return experiment.getExecutionUser();
+//		ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
+//		return experiment.getExecutionUser();
+        return null;
 	}
 
     @Override
@@ -1368,9 +1313,9 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             if (!isExperimentExists(experimentId, true)){
                 throw new ExperimentDoesNotExistsException(experimentId);
             }
-            ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
-            experiment.setExperimentName(experimentName);
-            experiment.save();
+//            ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
+//            experiment.setExperimentName(experimentName);
+//            experiment.save();
         }
 	}
 
@@ -1385,7 +1330,7 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 		if (!isExperimentExists(experimentId, true)){
 			throw new ExperimentDoesNotExistsException(experimentId);
 		}
-		ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
+//		ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
 //		ExperimentDataResource data = experiment.getData();
 //		if (data.isExperimentMetadataPresent()){
 //			return data.getExperimentMetadata().getMetadata();
@@ -1457,15 +1402,16 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 		if (!isExperimentExists(experimentId)){
 			throw new ExperimentDoesNotExistsException(experimentId);
 		}
-		ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
-		List<WorkflowExecution> result=new ArrayList<WorkflowExecution>();
-		List<WorkflowDataResource> workflowInstances = experiment.getWorkflowInstances();
-		for (WorkflowDataResource resource : workflowInstances) {
-			WorkflowExecution workflowInstance = new WorkflowExecution(resource.getExperimentID(), resource.getWorkflowInstanceID());
-			workflowInstance.setTemplateName(resource.getTemplateName());
-			result.add(workflowInstance);
-		}
-		return result;
+//		ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
+//		List<WorkflowExecution> result=new ArrayList<WorkflowExecution>();
+//		List<WorkflowDataResource> workflowInstances = experiment.getWorkflowInstances();
+//		for (WorkflowDataResource resource : workflowInstances) {
+//			WorkflowExecution workflowInstance = new WorkflowExecution(resource.getExperimentID(), resource.getWorkflowInstanceID());
+//			workflowInstance.setTemplateName(resource.getTemplateName());
+//			result.add(workflowInstance);
+//		}
+//		return result;
+        return null;
 	}
 
 
@@ -1567,10 +1513,10 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             if (!isWorkflowInstanceNodePresent(node.getWorkflowInstance().getWorkflowExecutionId(),node.getNodeId(),true)){
                 throw new WorkflowInstanceNodeDoesNotExistsException(node.getWorkflowInstance().getWorkflowExecutionId(), node.getNodeId());
             }
-            WorkflowDataResource wi = jpa.getWorker().getWorkflowInstance(node.getWorkflowInstance().getWorkflowExecutionId());
-            NodeDataResource nodeData = wi.getNodeData(node.getNodeId());
-            nodeData.setInputs(data);
-            nodeData.save();
+//            WorkflowDataResource wi = jpa.getWorker().getWorkflowInstance(node.getWorkflowInstance().getWorkflowExecutionId());
+//            NodeDataResource nodeData = wi.getNodeData(node.getNodeId());
+//            nodeData.setInputs(data);
+//            nodeData.save();
         }
 	}
 
@@ -1584,10 +1530,10 @@ public class AiravataJPARegistry extends AiravataRegistry2{
                 if (!isWorkflowInstanceNodePresent(node.getWorkflowInstance().getWorkflowExecutionId(),node.getNodeId(),true)){
                     throw new WorkflowInstanceNodeDoesNotExistsException(node.getWorkflowInstance().getWorkflowExecutionId(), node.getNodeId());
                 }
-                WorkflowDataResource wi = jpa.getWorker().getWorkflowInstance(node.getWorkflowInstance().getWorkflowExecutionId());
-                NodeDataResource nodeData = wi.getNodeData(node.getNodeId());
-                nodeData.setOutputs(data);
-                nodeData.save();
+//                WorkflowDataResource wi = jpa.getWorker().getWorkflowInstance(node.getWorkflowInstance().getWorkflowExecutionId());
+//                NodeDataResource nodeData = wi.getNodeData(node.getNodeId());
+//                nodeData.setOutputs(data);
+//                nodeData.save();
             } catch (RegistryException e) {
                 e.printStackTrace();
                 throw e;
@@ -1765,17 +1711,17 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             if (!isWorkflowInstanceNodePresent(workflowInstance.getWorkflowExecutionId(), nodeId, true)){
                 throw new WorkflowInstanceNodeDoesNotExistsException(workflowInstance.getWorkflowExecutionId(), nodeId);
             }
-            NodeDataResource nodeData = jpa.getWorker().getWorkflowInstance(workflowInstance.getWorkflowExecutionId()).getNodeData(nodeId);
-            nodeData.setStatus(workflowStatusNode.getExecutionStatus().toString());
-            Timestamp t = new Timestamp(workflowStatusNode.getStatusUpdateTime().getTime());
-            if (workflowStatusNode.getExecutionStatus()==State.STARTED){
-                nodeData.setStartTime(t);
-            }
-            nodeData.setLastUpdateTime(t);
-            nodeData.save();
+//            NodeDataResource nodeData = jpa.getWorker().getWorkflowInstance(workflowInstance.getWorkflowExecutionId()).getNodeData(nodeId);
+//            nodeData.setStatus(workflowStatusNode.getExecutionStatus().toString());
+//            Timestamp t = new Timestamp(workflowStatusNode.getStatusUpdateTime().getTime());
+//            if (workflowStatusNode.getExecutionStatus()==State.STARTED){
+//                nodeData.setStartTime(t);
+//            }
+//            nodeData.setLastUpdateTime(t);
+//            nodeData.save();
 			//Each time the node status is updated, the time of update for the workflow status is going to be the same
-            WorkflowExecutionStatus currentWorkflowInstanceStatus = getWorkflowInstanceStatus(workflowInstance.getWorkflowExecutionId());
-            updateWorkflowInstanceStatus(new WorkflowExecutionStatus(workflowInstance, currentWorkflowInstanceStatus.getExecutionStatus(), t));
+//            WorkflowExecutionStatus currentWorkflowInstanceStatus = getWorkflowInstanceStatus(workflowInstance.getWorkflowExecutionId());
+//            updateWorkflowInstanceStatus(new WorkflowExecutionStatus(workflowInstance, currentWorkflowInstanceStatus.getExecutionStatus(), t));
         }
 	}
 
@@ -1815,9 +1761,10 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 			throw new WorkflowInstanceNodeDoesNotExistsException(id, nodeId);
 		}
 		WorkflowDataResource workflowInstance = jpa.getWorker().getWorkflowInstance(id);
-		NodeDataResource nodeData = workflowInstance.getNodeData(nodeId);
-		return new NodeExecutionStatus(new WorkflowInstanceNode(new WorkflowExecution(workflowInstance.getExperimentID(), workflowInstance.getWorkflowInstanceID()), nodeData.getNodeID()), nodeData.getStatus()==null?null:State.valueOf(nodeData.getStatus()),nodeData.getLastUpdateTime());
-	}
+//		NodeDataResource nodeData = workflowInstance.getNodeData(nodeId);
+//		return new NodeExecutionStatus(new WorkflowInstanceNode(new WorkflowExecution(workflowInstance.getExperimentID(), workflowInstance.getWorkflowInstanceID()), nodeData.getNodeID()), nodeData.getStatus()==null?null:State.valueOf(nodeData.getStatus()),nodeData.getLastUpdateTime());
+	    return null;
+    }
 
 
 	@Override
@@ -1832,8 +1779,9 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 			throw new WorkflowInstanceNodeDoesNotExistsException(id, nodeId);
 		}
 		WorkflowDataResource workflowInstance = jpa.getWorker().getWorkflowInstance(id);
-		NodeDataResource nodeData = workflowInstance.getNodeData(nodeId);
-		return nodeData.getStartTime();
+//		NodeDataResource nodeData = workflowInstance.getNodeData(nodeId);
+//		return nodeData.getStartTime();
+        return null;
 	}
 
 
@@ -1887,12 +1835,13 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             WorkflowExecution workflowInstance = new WorkflowExecution(resource.getExperimentID(), resource.getWorkflowInstanceID());
             workflowInstance.setTemplateName(resource.getTemplateName());
             WorkflowExecutionData workflowInstanceData = new WorkflowExecutionDataImpl(null, workflowInstance, new WorkflowExecutionStatus(workflowInstance, resource.getStatus()==null? null:State.valueOf(resource.getStatus()),resource.getLastUpdatedTime()), null);
-            List<NodeDataResource> nodeData = resource.getNodeData();
-            for (NodeDataResource nodeDataResource : nodeData) {
-                workflowInstanceData.getNodeDataList().add(getWorkflowInstanceNodeData(workflowInstanceId, nodeDataResource.getNodeID()));
-            }
+//            List<NodeDataResource> nodeData = resource.getNodeData();
+//            for (NodeDataResource nodeDataResource : nodeData) {
+//                workflowInstanceData.getNodeDataList().add(getWorkflowInstanceNodeData(workflowInstanceId, nodeDataResource.getNodeID()));
+//            }
             return workflowInstanceData;
-        } catch (ExperimentLazyLoadedException e) {
+//        } catch (ExperimentLazyLoadedException e) {
+        } catch (Exception e) {
             throw new RegistryException(e);
         }
 
@@ -1908,13 +1857,14 @@ public class AiravataJPARegistry extends AiravataRegistry2{
         if (!isWorkflowInstanceNodePresent(workflowInstanceId, nodeId)){
 			throw new WorkflowInstanceNodeDoesNotExistsException(workflowInstanceId,nodeId);
 		}
-		NodeDataResource nodeData = jpa.getWorker().getWorkflowInstance(workflowInstanceId).getNodeData(nodeId);
-		NodeExecutionData data = new NodeExecutionData(new WorkflowInstanceNode(new WorkflowExecution(nodeData.getWorkflowDataResource().getExperimentID(),nodeData.getWorkflowDataResource().getWorkflowInstanceID()),nodeData.getNodeID()));
-		data.setInput(nodeData.getInputs());
-		data.setOutput(nodeData.getOutputs());
-        data.setType(WorkflowNodeType.getType(nodeData.getNodeType()).getNodeType());
+//		NodeDataResource nodeData = jpa.getWorker().getWorkflowInstance(workflowInstanceId).getNodeData(nodeId);
+//		NodeExecutionData data = new NodeExecutionData(new WorkflowInstanceNode(new WorkflowExecution(nodeData.getWorkflowDataResource().getExperimentID(),nodeData.getWorkflowDataResource().getWorkflowInstanceID()),nodeData.getNodeID()));
+//		data.setInput(nodeData.getInputs());
+//		data.setOutput(nodeData.getOutputs());
+//        data.setType(WorkflowNodeType.getType(nodeData.getNodeType()).getNodeType());
 		//TODO setup status
-		return data;
+//		return data;
+        return null;
 	}
 
 
@@ -1960,10 +1910,10 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             if (isWorkflowInstanceExists(workflowInstanceId)){
                 throw new WorkflowInstanceAlreadyExistsException(workflowInstanceId);
             }
-            ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
-            WorkflowDataResource workflowInstanceResource = experiment.createWorkflowInstanceResource(workflowInstanceId);
-            workflowInstanceResource.setTemplateName(templateName);
-            workflowInstanceResource.save();
+//            ExperimentMetadataResource experiment = jpa.getWorker().getExperiment(experimentId);
+//            WorkflowDataResource workflowInstanceResource = experiment.createWorkflowInstanceResource(workflowInstanceId);
+//            workflowInstanceResource.setTemplateName(templateName);
+//            workflowInstanceResource.save();
         }
 	}
 
@@ -1978,9 +1928,9 @@ public class AiravataJPARegistry extends AiravataRegistry2{
                 if (!isWorkflowInstanceNodePresent(node.getWorkflowInstance().getWorkflowExecutionId(),node.getNodeId(), true)){
                     throw new WorkflowInstanceNodeDoesNotExistsException(node.getWorkflowInstance().getWorkflowExecutionId(),node.getNodeId());
                 }
-                NodeDataResource nodeData = jpa.getWorker().getWorkflowInstance(node.getWorkflowInstance().getWorkflowExecutionId()).getNodeData(node.getNodeId());
-                nodeData.setNodeType(type.getNodeType().toString());
-                nodeData.save();
+//                NodeDataResource nodeData = jpa.getWorker().getWorkflowInstance(node.getWorkflowInstance().getWorkflowExecutionId()).getNodeData(node.getNodeId());
+//                nodeData.setNodeType(type.getNodeType().toString());
+//                nodeData.save();
             } catch (RegistryException e) {
                 e.printStackTrace();
                 throw e;
@@ -1998,8 +1948,8 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             if (isWorkflowInstanceNodePresent(workflowInstanceId, nodeId)){
                 throw new WorkflowInstanceNodeAlreadyExistsException(workflowInstanceId, nodeId);
             }
-            NodeDataResource nodeData = jpa.getWorker().getWorkflowInstance(workflowInstanceId).createNodeData(nodeId);
-            nodeData.save();
+//            NodeDataResource nodeData = jpa.getWorker().getWorkflowInstance(workflowInstanceId).createNodeData(nodeId);
+//            nodeData.save();
         }
 
 	}
@@ -2082,14 +2032,15 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             return provenanceRegistry.getExperimentExecutionErrors(experimentId);
         }
 		List<ExperimentExecutionError> result=new ArrayList<ExperimentExecutionError>();
-		List<ExecutionErrorResource> executionErrors = jpa.getWorker().getExperiment(experimentId).getExecutionErrors(Source.EXPERIMENT.toString(), experimentId, null, null, null);
-		for (ExecutionErrorResource errorResource : executionErrors) {
-			ExperimentExecutionError error = new ExperimentExecutionError();
-			setupValues(errorResource, error);
-			error.setExperimentId(errorResource.getMetadataResource().getExpID());
-			result.add(error);
-		}
-		return result;
+//		List<ExecutionErrorResource> executionErrors = jpa.getWorker().getExperiment(experimentId).getExecutionErrors(Source.EXPERIMENT.toString(), experimentId, null, null, null);
+//		for (ExecutionErrorResource errorResource : executionErrors) {
+//			ExperimentExecutionError error = new ExperimentExecutionError();
+//			setupValues(errorResource, error);
+//			error.setExperimentId(errorResource.getMetadataResource().getExpID());
+//			result.add(error);
+//		}
+//		return result;
+        return  null;
 	}
 
 	@Override
@@ -2100,15 +2051,16 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             return provenanceRegistry.getWorkflowExecutionErrors(experimentId, workflowInstanceId);
         }
 		List<WorkflowExecutionError> result=new ArrayList<WorkflowExecutionError>();
-		List<ExecutionErrorResource> executionErrors = jpa.getWorker().getExperiment(experimentId).getExecutionErrors(Source.WORKFLOW.toString(), experimentId, workflowInstanceId, null, null);
-		for (ExecutionErrorResource errorResource : executionErrors) {
-			WorkflowExecutionError error = new WorkflowExecutionError();
-			setupValues(errorResource, error);
-			error.setExperimentId(errorResource.getMetadataResource().getExpID());
-			error.setWorkflowInstanceId(errorResource.getWorkflowDataResource().getWorkflowInstanceID());
-			result.add(error);
-		}
-		return result;
+//		List<ExecutionErrorResource> executionErrors = jpa.getWorker().getExperiment(experimentId).getExecutionErrors(Source.WORKFLOW.toString(), experimentId, workflowInstanceId, null, null);
+//		for (ExecutionErrorResource errorResource : executionErrors) {
+//			WorkflowExecutionError error = new WorkflowExecutionError();
+//			setupValues(errorResource, error);
+//			error.setExperimentId(errorResource.getMetadataResource().getExpID());
+//			error.setWorkflowInstanceId(errorResource.getWorkflowDataResource().getWorkflowInstanceID());
+//			result.add(error);
+//		}
+//		return result;
+        return null;
 	}
 
 	@Override
@@ -2118,16 +2070,17 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             return provenanceRegistry.getNodeExecutionErrors(experimentId, workflowInstanceId, nodeId);
         }
 		List<NodeExecutionError> result=new ArrayList<NodeExecutionError>();
-		List<ExecutionErrorResource> executionErrors = jpa.getWorker().getExperiment(experimentId).getExecutionErrors(Source.NODE.toString(), experimentId, workflowInstanceId, nodeId, null);
-		for (ExecutionErrorResource errorResource : executionErrors) {
-			NodeExecutionError error = new NodeExecutionError();
-			setupValues(errorResource, error);
-			error.setExperimentId(errorResource.getMetadataResource().getExpID());
-			error.setNodeId(errorResource.getNodeID());
-			error.setWorkflowInstanceId(errorResource.getWorkflowDataResource().getWorkflowInstanceID());
-			result.add(error);
-		}
-		return result;
+//		List<ExecutionErrorResource> executionErrors = jpa.getWorker().getExperiment(experimentId).getExecutionErrors(Source.NODE.toString(), experimentId, workflowInstanceId, nodeId, null);
+//		for (ExecutionErrorResource errorResource : executionErrors) {
+//			NodeExecutionError error = new NodeExecutionError();
+//			setupValues(errorResource, error);
+//			error.setExperimentId(errorResource.getMetadataResource().getExpID());
+//			error.setNodeId(errorResource.getNodeID());
+//			error.setWorkflowInstanceId(errorResource.getWorkflowDataResource().getWorkflowInstanceID());
+//			result.add(error);
+//		}
+//		return result;
+        return null;
 	}
 
 	@Override
@@ -2138,31 +2091,32 @@ public class AiravataJPARegistry extends AiravataRegistry2{
             return provenanceRegistry.getApplicationJobErrors(experimentId, workflowInstanceId, nodeId, gfacJobId);
         }
 		List<ApplicationJobExecutionError> result=new ArrayList<ApplicationJobExecutionError>();
-		List<ExecutionErrorResource> executionErrors = jpa.getWorker().getExperiment(experimentId).getExecutionErrors(Source.APPLICATION.toString(), experimentId, workflowInstanceId, nodeId, gfacJobId);
-		for (ExecutionErrorResource errorResource : executionErrors) {
-			ApplicationJobExecutionError error = new ApplicationJobExecutionError();
-			setupValues(errorResource, error);
-			error.setExperimentId(errorResource.getMetadataResource().getExpID());
-			error.setJobId(errorResource.getGfacJobID());
-			error.setNodeId(errorResource.getNodeID());
-			error.setWorkflowInstanceId(errorResource.getWorkflowDataResource().getWorkflowInstanceID());
-			result.add(error);
-		}
-		return result;
+//		List<ExecutionErrorResource> executionErrors = jpa.getWorker().getExperiment(experimentId).getExecutionErrors(Source.APPLICATION.toString(), experimentId, workflowInstanceId, nodeId, gfacJobId);
+//		for (ExecutionErrorResource errorResource : executionErrors) {
+//			ApplicationJobExecutionError error = new ApplicationJobExecutionError();
+//			setupValues(errorResource, error);
+//			error.setExperimentId(errorResource.getMetadataResource().getExpID());
+//			error.setJobId(errorResource.getGfacJobID());
+//			error.setNodeId(errorResource.getNodeID());
+//			error.setWorkflowInstanceId(errorResource.getWorkflowDataResource().getWorkflowInstanceID());
+//			result.add(error);
+//		}
+//		return result;
+        return null;
 	}
 
-	private void setupValues(ExecutionErrorResource source,
-			ExecutionError destination) {
-		destination.setActionTaken(source.getActionTaken());
-		destination.setErrorCode(source.getErrorCode());
-		destination.setErrorDescription(source.getErrorDes());
-		destination.setErrorLocation(source.getErrorLocation());
-		destination.setErrorMessage(source.getErrorMsg());
-		destination.setErrorReported(source.getErrorReporter());
-		destination.setErrorTime(source.getErrorTime());
-		destination.setSource(Source.valueOf(source.getSourceType()));
-		destination.setErrorReference(source.getErrorReference());
-	}
+//	private void setupValues(ExecutionErrorResource source,
+//			ExecutionError destination) {
+//		destination.setActionTaken(source.getActionTaken());
+//		destination.setErrorCode(source.getErrorCode());
+//		destination.setErrorDescription(source.getErrorDes());
+//		destination.setErrorLocation(source.getErrorLocation());
+//		destination.setErrorMessage(source.getErrorMsg());
+//		destination.setErrorReported(source.getErrorReporter());
+//		destination.setErrorTime(source.getErrorTime());
+//		destination.setSource(Source.valueOf(source.getSourceType()));
+//		destination.setErrorReference(source.getErrorReference());
+//	}
 
 	@Override
 	public List<ApplicationJobExecutionError> getApplicationJobErrors(String gfacJobId)
@@ -2207,33 +2161,34 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 		if (provenanceRegistry != null){
             return provenanceRegistry.addExperimentError(error);
         }
-		ExecutionErrorResource executionError = createNewExecutionErrorResource(error.getExperimentId(),error,ExecutionErrors.Source.EXPERIMENT);
-		executionError.save();
-		return executionError.getErrorID();
-	}
-
-	private ExecutionErrorResource createNewExecutionErrorResource(
-			String experimentId, ExecutionError errorSource, ExecutionErrors.Source type) throws RegistryException {
-		if (!isExperimentExists(experimentId)){
-			throw new ExperimentDoesNotExistsException(experimentId);
-		}
-		ExecutionErrorResource executionError = jpa.getWorker().getExperiment(experimentId).createExecutionError();
-		setupValues(errorSource, executionError);
-		executionError.setSourceType(type.toString());
-		return executionError;
+//		ExecutionErrorResource executionError = createNewExecutionErrorResource(error.getExperimentId(),error,ExecutionErrors.Source.EXPERIMENT);
+//		executionError.save();
+//		return executionError.getErrorID();
+        return 0;
 	}
 
-	private void setupValues(ExecutionError source,
-			ExecutionErrorResource destination) {
-		destination.setErrorCode(source.getErrorCode());
-		destination.setErrorDes(source.getErrorDescription());
-		destination.setErrorLocation(source.getErrorLocation());
-		destination.setErrorMsg(source.getErrorMessage());
-		destination.setErrorReference(source.getErrorReference());
-		destination.setErrorReporter(source.getErrorReported());
-		destination.setErrorTime(new Timestamp(source.getErrorTime().getTime()));
-		destination.setActionTaken(source.getActionTaken());
-	}
+//	private ExecutionErrorResource createNewExecutionErrorResource(
+//			String experimentId, ExecutionError errorSource, ExecutionErrors.Source type) throws RegistryException {
+//		if (!isExperimentExists(experimentId)){
+//			throw new ExperimentDoesNotExistsException(experimentId);
+//		}
+//		ExecutionErrorResource executionError = jpa.getWorker().getExperiment(experimentId).createExecutionError();
+//		setupValues(errorSource, executionError);
+//		executionError.setSourceType(type.toString());
+//		return executionError;
+//	}
+
+//	private void setupValues(ExecutionError source,
+//			ExecutionErrorResource destination) {
+//		destination.setErrorCode(source.getErrorCode());
+//		destination.setErrorDes(source.getErrorDescription());
+//		destination.setErrorLocation(source.getErrorLocation());
+//		destination.setErrorMsg(source.getErrorMessage());
+//		destination.setErrorReference(source.getErrorReference());
+//		destination.setErrorReporter(source.getErrorReported());
+//		destination.setErrorTime(new Timestamp(source.getErrorTime().getTime()));
+//		destination.setActionTaken(source.getActionTaken());
+//	}
 
 	@Override
 	public int addWorkflowExecutionError(WorkflowExecutionError error)
@@ -2241,10 +2196,11 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 		if (provenanceRegistry != null){
             return provenanceRegistry.addWorkflowExecutionError(error);
         }
-		ExecutionErrorResource executionError = createNewExecutionErrorResource(error.getExperimentId(),error,ExecutionErrors.Source.WORKFLOW);
-		executionError.setWorkflowDataResource(jpa.getWorker().getExperiment(error.getExperimentId()).getWorkflowInstance(error.getWorkflowInstanceId()));
-		executionError.save();
-		return executionError.getErrorID();
+//		ExecutionErrorResource executionError = createNewExecutionErrorResource(error.getExperimentId(),error,ExecutionErrors.Source.WORKFLOW);
+//		executionError.setWorkflowDataResource(jpa.getWorker().getExperiment(error.getExperimentId()).getWorkflowInstance(error.getWorkflowInstanceId()));
+//		executionError.save();
+//		return executionError.getErrorID();
+        return 0;
 	}
 
 	@Override
@@ -2253,25 +2209,28 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 		if (provenanceRegistry != null){
             return provenanceRegistry.addNodeExecutionError(error);
         }
-		ExecutionErrorResource executionError = createNewExecutionErrorResource(error.getExperimentId(),error, Source.NODE);
-		executionError.setWorkflowDataResource(jpa.getWorker().getExperiment(error.getExperimentId()).getWorkflowInstance(error.getWorkflowInstanceId()));
-		executionError.setNodeID(error.getNodeId());
-		executionError.save();
-		return executionError.getErrorID();
+//		ExecutionErrorResource executionError = createNewExecutionErrorResource(error.getExperimentId(),error, Source.NODE);
+//		executionError.setWorkflowDataResource(jpa.getWorker().getExperiment(error.getExperimentId()).getWorkflowInstance(error.getWorkflowInstanceId()));
+//		executionError.setNodeID(error.getNodeId());
+//		executionError.save();
+//		return executionError.getErrorID();
+        return 0;
 	}
 
+
 	@Override
 	public int addApplicationJobExecutionError(ApplicationJobExecutionError error)
 			throws RegistryException {
 		if (provenanceRegistry != null){
             return provenanceRegistry.addApplicationJobExecutionError(error);
         }
-		ExecutionErrorResource executionError = createNewExecutionErrorResource(error.getExperimentId(),error, Source.APPLICATION);
-		executionError.setWorkflowDataResource(jpa.getWorker().getExperiment(error.getExperimentId()).getWorkflowInstance(error.getWorkflowInstanceId()));
-		executionError.setNodeID(error.getNodeId());
-		executionError.setGfacJobID(error.getJobId());
-		executionError.save();
-		return executionError.getErrorID();
+//		ExecutionErrorResource executionError = createNewExecutionErrorResource(error.getExperimentId(),error, Source.APPLICATION);
+//		executionError.setWorkflowDataResource(jpa.getWorker().getExperiment(error.getExperimentId()).getWorkflowInstance(error.getWorkflowInstanceId()));
+//		executionError.setNodeID(error.getNodeId());
+//		executionError.setGfacJobID(error.getJobId());
+//		executionError.save();
+//		return executionError.getErrorID();
+        return 0;
 	}
 
 	@Override
@@ -2288,124 +2247,126 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 //		if (!isWorkflowInstanceNodePresent(job.getWorkflowExecutionId(), job.getNodeId())){
 //			throw new WorkflowInstanceNodeDoesNotExistsException(job.getWorkflowExecutionId(), job.getNodeId());
 //		}
-		ExperimentMetadataResource expData = jpa.getWorker().getExperiment(job.getExperimentId());
-		GFacJobDataResource gfacJob = expData.createGFacJob(job.getJobId());
-		gfacJob.setMetadataResource(expData);
-		gfacJob.setWorkflowDataResource(expData.getWorkflowInstance(job.getWorkflowExecutionId()));
-		gfacJob.setNodeID(job.getNodeId());
-		setupValues(job, gfacJob);
-		gfacJob.save();
-		addApplicationJobStatusData(job.getJobId(), job.getStatus(), job.getStatusUpdateTime(),gfacJob);
-	}
-
-	private void setupValues(ApplicationJob job, GFacJobDataResource gfacJob) {
-		gfacJob.setApplicationDescID(job.getApplicationDescriptionId());
-		gfacJob.setStatusUpdateTime(new Timestamp(job.getStatusUpdateTime().getTime()));
-		gfacJob.setHostDescID(job.getHostDescriptionId());
-		gfacJob.setJobData(job.getJobData());
-		gfacJob.setMetadata(job.getMetadata());
-		gfacJob.setServiceDescID(job.getServiceDescriptionId());
-		gfacJob.setStatus(job.getStatus().toString());
-		gfacJob.setSubmittedTime(new Timestamp(job.getSubmittedTime().getTime()));
-	}
+//		ExperimentMetadataResource expData = jpa.getWorker().getExperiment(job.getExperimentId());
+//		GFacJobDataResource gfacJob = expData.createGFacJob(job.getJobId());
+//		gfacJob.setMetadataResource(expData);
+//		gfacJob.setWorkflowDataResource(expData.getWorkflowInstance(job.getWorkflowExecutionId()));
+//		gfacJob.setNodeID(job.getNodeId());
+//		setupValues(job, gfacJob);
+//		gfacJob.save();
+//		addApplicationJobStatusData(job.getJobId(), job.getStatus(), job.getStatusUpdateTime(),gfacJob);
+	}
+
+//	private void setupValues(ApplicationJob job, GFacJobDataResource gfacJob) {
+//		gfacJob.setApplicationDescID(job.getApplicationDescriptionId());
+//		gfacJob.setStatusUpdateTime(new Timestamp(job.getStatusUpdateTime().getTime()));
+//		gfacJob.setHostDescID(job.getHostDescriptionId());
+//		gfacJob.setJobData(job.getJobData());
+//		gfacJob.setMetadata(job.getMetadata());
+//		gfacJob.setServiceDescID(job.getServiceDescriptionId());
+//		gfacJob.setStatus(job.getStatus().toString());
+//		gfacJob.setSubmittedTime(new Timestamp(job.getSubmittedTime().getTime()));
+//	}
 
 	@Override
 	public void updateApplicationJob(ApplicationJob job) throws RegistryException {
-		GFacJobDataResource gFacJob = validateAndGetGFacJob(job.getJobId());
-		setupValues(job, gFacJob);
-		gFacJob.save();
+//		GFacJobDataResource gFacJob = validateAndGetGFacJob(job.getJobId());
+//		setupValues(job, gFacJob);
+//		gFacJob.save();
 	}
 
-	private GFacJobDataResource validateAndGetGFacJob(String jobId)
-			throws InvalidApplicationJobIDException, RegistryException,
-			ApplicationJobDoesNotExistsException {
-		if (jobId==null || jobId.equals("")){
-			throw new InvalidApplicationJobIDException();
-		}
-		if (!isApplicationJobExists(jobId)){
-			throw new ApplicationJobDoesNotExistsException(jobId);
-		}
-		GFacJobDataResource gFacJob = jpa.getWorker().getGFacJob(jobId);
-		return gFacJob;
-	}
+//	private GFacJobDataResource validateAndGetGFacJob(String jobId)
+//			throws InvalidApplicationJobIDException, RegistryException,
+//			ApplicationJobDoesNotExistsException {
+//		if (jobId==null || jobId.equals("")){
+//			throw new InvalidApplicationJobIDException();
+//		}
+//		if (!isApplicationJobExists(jobId)){
+//			throw new ApplicationJobDoesNotExistsException(jobId);
+//		}
+//		GFacJobDataResource gFacJob = jpa.getWorker().getGFacJob(jobId);
+//		return gFacJob;
+//	}
 
 	@Override
 	public void updateApplicationJobStatus(String gfacJobId, ApplicationJobStatus status, Date statusUpdateTime)
 			throws RegistryException {
-		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
-		gFacJob.setStatus(status.toString());
-		gFacJob.setStatusUpdateTime(new Timestamp(statusUpdateTime.getTime()));
-		gFacJob.save();
-		addApplicationJobStatusData(gfacJobId, status, statusUpdateTime, null);
+//		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
+//		gFacJob.setStatus(status.toString());
+//		gFacJob.setStatusUpdateTime(new Timestamp(statusUpdateTime.getTime()));
+//		gFacJob.save();
+//		addApplicationJobStatusData(gfacJobId, status, statusUpdateTime, null);
 	}
 
 	@Override
 	public void updateApplicationJobData(String gfacJobId, String jobdata)
 			throws RegistryException {
-		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
-		gFacJob.setJobData(jobdata);
-		gFacJob.save();
+//		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
+//		gFacJob.setJobData(jobdata);
+//		gFacJob.save();
 	}
 
 	@Override
 	public void updateApplicationJobSubmittedTime(String gfacJobId, Date submitted)
 			throws RegistryException {
-		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
-		gFacJob.setSubmittedTime(new Timestamp(submitted.getTime()));
-		gFacJob.save();
+//		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
+//		gFacJob.setSubmittedTime(new Timestamp(submitted.getTime()));
+//		gFacJob.save();
 	}
 
 	@Override
 	public void updateApplicationJobStatusUpdateTime(String gfacJobId, Date completed)
 			throws RegistryException {
-		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
-		gFacJob.setStatusUpdateTime(new Timestamp(completed.getTime()));
-		gFacJob.save();
+//		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
+//		gFacJob.setStatusUpdateTime(new Timestamp(completed.getTime()));
+//		gFacJob.save();
 	}
 
 	@Override
 	public void updateApplicationJobMetadata(String gfacJobId, String metadata)
 			throws RegistryException {
-		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
-		gFacJob.setMetadata(metadata);
-		gFacJob.save();
+//		GFacJobDataResource gFacJob = validateAndGetGFacJob(gfacJobId);
+//		gFacJob.setMetadata(metadata);
+//		gFacJob.save();
 	}
 
 	@Override
 	public ApplicationJob getApplicationJob(String gfacJobId) throws RegistryException {
-		GFacJobDataResource gfacJob = validateAndGetGFacJob(gfacJobId);
-		ApplicationJob job = new ApplicationJob();
-		setupValues(gfacJob, job);
-		return job;
+//		GFacJobDataResource gfacJob = validateAndGetGFacJob(gfacJobId);
+//		ApplicationJob job = new ApplicationJob();
+//		setupValues(gfacJob, job);
+//		return job;
+        return null;
 	}
 
-	private void setupValues(GFacJobDataResource gfacJob, ApplicationJob job) {
-		job.setApplicationDescriptionId(gfacJob.getApplicationDescID());
-		job.setStatusUpdateTime(gfacJob.getStatusUpdateTime());
-		job.setExperimentId(gfacJob.getMetadataResource().getExpID());
-		job.setHostDescriptionId(gfacJob.getHostDescID());
-		job.setJobData(gfacJob.getJobData());
-		job.setJobId(gfacJob.getLocalJobID());
-		job.setStatus(ApplicationJobStatus.valueOf(gfacJob.getStatus()));
-		job.setMetadata(gfacJob.getMetadata());
-		job.setNodeId(gfacJob.getNodeID());
-		job.setServiceDescriptionId(gfacJob.getServiceDescID());
-		job.setSubmittedTime(gfacJob.getSubmittedTime());
-		job.setWorkflowExecutionId(gfacJob.getWorkflowDataResource().getWorkflowInstanceID());
-	}
+//	private void setupValues(GFacJobDataResource gfacJob, ApplicationJob job) {
+//		job.setApplicationDescriptionId(gfacJob.getApplicationDescID());
+//		job.setStatusUpdateTime(gfacJob.getStatusUpdateTime());
+//		job.setExperimentId(gfacJob.getMetadataResource().getExpID());
+//		job.setHostDescriptionId(gfacJob.getHostDescID());
+//		job.setJobData(gfacJob.getJobData());
+//		job.setJobId(gfacJob.getLocalJobID());
+//		job.setStatus(ApplicationJobStatus.valueOf(gfacJob.getStatus()));
+//		job.setMetadata(gfacJob.getMetadata());
+//		job.setNodeId(gfacJob.getNodeID());
+//		job.setServiceDescriptionId(gfacJob.getServiceDescID());
+//		job.setSubmittedTime(gfacJob.getSubmittedTime());
+//		job.setWorkflowExecutionId(gfacJob.getWorkflowDataResource().getWorkflowInstanceID());
+//	}
 
 	@Override
 	public List<ApplicationJob> getApplicationJobsForDescriptors(String serviceDescriptionId,
 			String hostDescriptionId, String applicationDescriptionId)
 			throws RegistryException {
 		List<ApplicationJob> jobs=new ArrayList<ApplicationJob>();
-		List<GFacJobDataResource> gFacJobs = jpa.getWorker().getGFacJobs(serviceDescriptionId,hostDescriptionId,applicationDescriptionId);
-		for (GFacJobDataResource resource : gFacJobs) {
-			ApplicationJob job = new ApplicationJob();
-			setupValues(resource, job);
-			jobs.add(job);
-		}
-		return jobs;
+//		List<GFacJobDataResource> gFacJobs = jpa.getWorker().getGFacJobs(serviceDescriptionId,hostDescriptionId,applicationDescriptionId);
+//		for (GFacJobDataResource resource : gFacJobs) {
+//			ApplicationJob job = new ApplicationJob();
+//			setupValues(resource, job);
+//			jobs.add(job);
+//		}
+//		return jobs;
+        return null;
 	}
 
 	@Override
@@ -2417,24 +2378,25 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 			if (!isExperimentExists(experimentId)){
 				throw new ExperimentDoesNotExistsException(experimentId);
 			}
-			gFacJobs = jpa.getWorker().getExperiment(experimentId).getGFacJobs();
+//			gFacJobs = jpa.getWorker().getExperiment(experimentId).getGFacJobs();
 		}else if (nodeId==null){
 			if (!isWorkflowInstanceExists(workflowExecutionId)){
 				throw new WorkflowInstanceDoesNotExistsException(workflowExecutionId);
 			}
-			gFacJobs = jpa.getWorker().getExperiment(experimentId).getWorkflowInstance(workflowExecutionId).getGFacJobs();
+//			gFacJobs = jpa.getWorker().getExperiment(experimentId).getWorkflowInstance(workflowExecutionId).getGFacJobs();
 		}else{
 			if (!isWorkflowInstanceNodePresent(workflowExecutionId, nodeId)){
 				throw new WorkflowInstanceNodeDoesNotExistsException(workflowExecutionId, nodeId);
 			}
-			gFacJobs = jpa.getWorker().getExperiment(experimentId).getWorkflowInstance(workflowExecutionId).getNodeData(nodeId).getGFacJobs();
+//			gFacJobs = jpa.getWorker().getExperiment(experimentId).getWorkflowInstance(workflowExecutionId).getNodeData(nodeId).getGFacJobs();
 		}
-		for (Resource resource : gFacJobs) {
-			ApplicationJob job = new ApplicationJob();
-			setupValues((GFacJobDataResource)resource, job);
-			jobs.add(job);
-		}
-		return jobs;
+//		for (Resource resource : gFacJobs) {
+//			ApplicationJob job = new ApplicationJob();
+//			setupValues((GFacJobDataResource)resource, job);
+//			jobs.add(job);
+//		}
+//		return jobs;
+        return null;
 	}
 
 	@Override
@@ -2446,11 +2408,12 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 	public List<ApplicationJobStatusData> getApplicationJobStatusHistory(
 			String jobId) throws RegistryException {
 		List<ApplicationJobStatusData> statusData=new ArrayList<ApplicationJobStatusData>();
-		List<GFacJobStatusResource> statuses = jpa.getWorker().getGFacJobStatuses(jobId);
-		for (GFacJobStatusResource resource : statuses) {
-			statusData.add(new ApplicationJobStatusData(resource.getLocalJobID(),ApplicationJobStatus.valueOf(resource.getStatus()),resource.getStatusUpdateTime()));	
-		}
-		return statusData;
+//		List<GFacJobStatusResource> statuses = jpa.getWorker().getGFacJobStatuses(jobId);
+//		for (GFacJobStatusResource resource : statuses) {
+//			statusData.add(new ApplicationJobStatusData(resource.getLocalJobID(),ApplicationJobStatus.valueOf(resource.getStatus()),resource.getStatusUpdateTime()));
+//		}
+//		return statusData;
+        return null;
 	}
 	
 	@Override
@@ -2468,17 +2431,17 @@ public class AiravataJPARegistry extends AiravataRegistry2{
 	   	return result;
 	}
 	
-	private void addApplicationJobStatusData(String jobId, ApplicationJobStatus status, Date updatedTime, GFacJobDataResource dataResource) throws RegistryException {
-		if (RegistrySettings.isApplicationJobStatusHistoryEnabled()){
-			if (dataResource==null){
-				dataResource = jpa.getWorker().getGFacJob(jobId);
-			}
-			GFacJobStatusResource s = (GFacJobStatusResource)dataResource.create(ResourceType.GFAC_JOB_STATUS);
-			s.setStatus(status.toString());
-			s.setStatusUpdateTime(new Timestamp(updatedTime.getTime()));
-			s.save();
-		}
-	}
+//	private void addApplicationJobStatusData(String jobId, ApplicationJobStatus status, Date updatedTime, GFacJobDataResource dataResource) throws RegistryException {
+//		if (RegistrySettings.isApplicationJobStatusHistoryEnabled()){
+//			if (dataResource==null){
+//				dataResource = jpa.getWorker().getGFacJob(jobId);
+//			}
+//			GFacJobStatusResource s = (GFacJobStatusResource)dataResource.create(ResourceType.GFAC_JOB_STATUS);
+//			s.setStatus(status.toString());
+//			s.setStatusUpdateTime(new Timestamp(updatedTime.getTime()));
+//			s.save();
+//		}
+//	}
 
 	@Override
 	public boolean isCredentialExist(String gatewayId, String tokenId)
@@ -2586,84 +2549,86 @@ public class AiravataJPARegistry extends AiravataRegistry2{
         return false;  //To change body of implemented methods use File | Settings | File Templates.
     }
 
-    public boolean storeExperiment(String userName, String experimentID, String applicationName, String jobRequest) throws RegistryException {
-    	GatewayResource gateway = jpa.getGateway();
-		OrchestratorDataResource dataResource = gateway.createOrchestratorData(experimentID); 
-		dataResource.setUserName(userName);
-		dataResource.setExperimentID(experimentID);
-		dataResource.setStatus(AiravataJobState.State.CREATED.toString());
-        dataResource.setJobRequest(jobRequest);
-        dataResource.setApplicationName(applicationName);
-		dataResource.save();
-		return true;
-	}
+//    public boolean storeExperiment(String userName, String experimentID, String applicationName, String jobRequest) throws RegistryException {
+//    	GatewayResource gateway = jpa.getGateway();
+//		OrchestratorDataResource dataResource = gateway.createOrchestratorData(experimentID);
+//		dataResource.setUserName(userName);
+//		dataResource.setExperimentID(experimentID);
+//		dataResource.setStatus(AiravataJobState.State.CREATED.toString());
+//        dataResource.setJobRequest(jobRequest);
+//        dataResource.setApplicationName(applicationName);
+//		dataResource.save();
+//		return true;
+//	}
 
 	public boolean changeStatus(String experimentID, AiravataJobState.State state) throws RegistryException {
 		GatewayResource gateway = jpa.getGateway();
-		OrchestratorDataResource dataResource = gateway.createOrchestratorData(experimentID); 
-		dataResource.setStatus(state.toString());
-		dataResource.save();
+//		OrchestratorDataResource dataResource = gateway.createOrchestratorData(experimentID);
+//		dataResource.setStatus(state.toString());
+//		dataResource.save();
 		return true; 
     }
     public boolean changeStatus(String experimentID, AiravataJobState.State state, String gfacEPR) throws RegistryException {
     	GatewayResource gateway = jpa.getGateway();
-		OrchestratorDataResource dataResource = gateway.createOrchestratorData(experimentID); 
-		dataResource.setStatus(state.toString());
-		dataResource.setGfacEPR(gfacEPR);
-		dataResource.save();
+//		OrchestratorDataResource dataResource = gateway.createOrchestratorData(experimentID);
+//		dataResource.setStatus(state.toString());
+//		dataResource.setGfacEPR(gfacEPR);
+//		dataResource.save();
 		return true; 
     }
 
     public AiravataJobState getState(String experimentID) throws RegistryException {
         GatewayResource gateway = jpa.getGateway();
-        OrchestratorDataResource resource = (OrchestratorDataResource)gateway.get(ResourceType.ORCHESTRATOR, experimentID);
-        AiravataJobState airavataJobState = new AiravataJobState();
-        airavataJobState.setJobState(AiravataJobState.State.valueOf(resource.getStatus()));
-        return airavataJobState;
+//        OrchestratorDataResource resource = (OrchestratorDataResource)gateway.get(ResourceType.ORCHESTRATOR, experimentID);
+//        AiravataJobState airavataJobState = new AiravataJobState();
+//        airavataJobState.setJobState(AiravataJobState.State.valueOf(resource.getStatus()));
+//        return airavataJobState;
+        return null;
     }
 
     public List<String> getAllJobsWithState(AiravataJobState state) throws RuntimeException {
-        List<Resource> orchestratorDataWithStatus = ResourceUtils.getOrchestratorDataWithStatus(state.toString());
+//        List<Resource> orchestratorDataWithStatus = ResourceUtils.getOrchestratorDataWithStatus(state.toString());
         List<String> jobsWithStatus = new ArrayList<String>();
-        for (Resource resource : orchestratorDataWithStatus){
-            String experimentID = ((OrchestratorDataResource) resource).getExperimentID();
-            jobsWithStatus.add(experimentID);
-        }
+//        for (Resource resource : orchestratorDataWithStatus){
+//            String experimentID = ((OrchestratorDataResource) resource).getExperimentID();
+//            jobsWithStatus.add(experimentID);
+//        }
         return jobsWithStatus;
     }
 
     public List<String> getAllAcceptedJobs() throws RegistryException {
-        List<Resource> acceptedJobs = ResourceUtils.getOrchestratorDataWithStatus(AiravataJobState.State.ACCEPTED.toString());
+//        List<Resource> acceptedJobs = ResourceUtils.getOrchestratorDataWithStatus(AiravataJobState.State.ACCEPTED.toString());
         List<String> acceptedJobIds = new ArrayList<String>();
-        for (Resource resource : acceptedJobs){
-            String experimentID = ((OrchestratorDataResource) resource).getExperimentID();
-            acceptedJobIds.add(experimentID);
-        }
+//        for (Resource resource : acceptedJobs){
+//            String experimentID = ((OrchestratorDataResource) resource).getExperimentID();
+//            acceptedJobIds.add(experimentID);
+//        }
         return acceptedJobIds;
     }
 
 
     public List<String> getAllHangedJobs() throws RegistryException {
-        List<Resource> hangedJobs = ResourceUtils.getOrchestratorDataWithStatus(AiravataJobState.State.UNKNOWN.toString());
+//        List<Resource> hangedJobs = ResourceUtils.getOrchestratorDataWithStatus(AiravataJobState.State.UNKNOWN.toString());
         List<String> hangedJobIds = new ArrayList<String>();
-        for (Resource resource : hangedJobs){
-            String experimentID = ((OrchestratorDataResource) resource).getExperimentID();
-            hangedJobIds.add(experimentID);
-        }
+//        for (Resource resource : hangedJobs){
+//            String experimentID = ((OrchestratorDataResource) resource).getExperimentID();
+//            hangedJobIds.add(experimentID);
+//        }
         return hangedJobIds;
     }
 
     public int getHangedJobCount() throws RegistryException {
-        List<Resource> hangedJobs = ResourceUtils.getOrchestratorDataWithStatus(AiravataJobState.State.HANGED.toString());
-        return hangedJobs.size();
+//        List<Resource> hangedJobs = ResourceUtils.getOrchestratorDataWithStatus(AiravataJobState.State.HANGED.toString());
+//        return hangedJobs.size();
+        return 0;
     }
 
     public boolean resetHangedJob(String experimentID) throws RegistryException {
         try {
             GatewayResource gatewayResource = jpa.getGateway();
-            OrchestratorDataResource orchestratorResource = (OrchestratorDataResource)gatewayResource.get(ResourceType.ORCHESTRATOR, experimentID);
-            orchestratorResource.setStatus(AiravataJobState.State.SUBMITTED.toString());
-            orchestratorResource.save();
+//            OrchestratorDataResource orchestratorResource = (OrchestratorDataResource)gatewayResource.get(ResourceType.ORCHESTRATOR, experimentID);
+//            orchestratorResource.setStatus(AiravataJobState.State.SUBMITTED.toString());
+//            orchestratorResource.save();
             return true;
         } catch (Exception e) {
            return false;


[5/8] new datamodels for AIRAVATA-1017

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Status.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Status.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Status.java
new file mode 100644
index 0000000..427c9d2
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Status.java
@@ -0,0 +1,170 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+import java.sql.Timestamp;
+
+@Entity
+@Table(name = "STATUS")
+public class Status {
+    @Id
+    @GeneratedValue
+    @Column(name = "STATUS_ID")
+    private int statusId;
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Column(name = "NODE_INSTANCE_ID")
+    private String nodeId;
+    @Column(name = "TRANSFER_ID")
+    private String transferId;
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "JOB_ID")
+    private String jobId;
+    @Column(name = "STATE")
+    private String state;
+    @Column(name = "STATUS_UPDATE_TIME")
+    private Timestamp statusUpdateTime;
+    @Column(name = "STATUS_TYPE")
+    private String statusType;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TASK_ID")
+    private TaskDetail task;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "NODE_INSTANCE_ID")
+    private WorkflowNodeDetail node;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "TRANSFER_ID")
+    private DataTransferDetail transferDetail;
+
+    public int getStatusId() {
+        return statusId;
+    }
+
+    public void setStatusId(int statusId) {
+        this.statusId = statusId;
+    }
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public String getTransferId() {
+        return transferId;
+    }
+
+    public void setTransferId(String transferId) {
+        this.transferId = transferId;
+    }
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(String jobId) {
+        this.jobId = jobId;
+    }
+
+    public String getState() {
+        return state;
+    }
+
+    public void setState(String state) {
+        this.state = state;
+    }
+
+    public Timestamp getStatusUpdateTime() {
+        return statusUpdateTime;
+    }
+
+    public void setStatusUpdateTime(Timestamp statusUpdateTime) {
+        this.statusUpdateTime = statusUpdateTime;
+    }
+
+    public String getStatusType() {
+        return statusType;
+    }
+
+    public void setStatusType(String statusType) {
+        this.statusType = statusType;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+
+    public TaskDetail getTask() {
+        return task;
+    }
+
+    public void setTask(TaskDetail task) {
+        this.task = task;
+    }
+
+    public WorkflowNodeDetail getNode() {
+        return node;
+    }
+
+    public void setNode(WorkflowNodeDetail node) {
+        this.node = node;
+    }
+
+    public DataTransferDetail getTransferDetail() {
+        return transferDetail;
+    }
+
+    public void setTransferDetail(DataTransferDetail transferDetail) {
+        this.transferDetail = transferDetail;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/TaskDetail.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/TaskDetail.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/TaskDetail.java
new file mode 100644
index 0000000..ae04b0e
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/TaskDetail.java
@@ -0,0 +1,93 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+import java.sql.Timestamp;
+
+@Entity
+@Table(name = "TASK_DETAIL")
+public class TaskDetail {
+    @Id
+    @Column(name = "TASK_ID")
+    private String taskId;
+    @Column(name = "NODE_INSTANCE_ID")
+    private String nodeId;
+    @Column(name = "CREATION_TIME")
+    private Timestamp creationTime;
+    @Column(name = "APPLICATION_ID")
+    private String appId;
+    @Column(name = "APPLICATION_VERSION")
+    private String appVersion;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(String taskId) {
+        this.taskId = taskId;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public Timestamp getCreationTime() {
+        return creationTime;
+    }
+
+    public void setCreationTime(Timestamp creationTime) {
+        this.creationTime = creationTime;
+    }
+
+    public String getAppId() {
+        return appId;
+    }
+
+    public void setAppId(String appId) {
+        this.appId = appId;
+    }
+
+    public String getAppVersion() {
+        return appVersion;
+    }
+
+    public void setAppVersion(String appVersion) {
+        this.appVersion = appVersion;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/WorkflowNodeDetail.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/WorkflowNodeDetail.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/WorkflowNodeDetail.java
new file mode 100644
index 0000000..c3e4a4f
--- /dev/null
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/WorkflowNodeDetail.java
@@ -0,0 +1,83 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.persistance.registry.jpa.model;
+
+import javax.persistence.*;
+import java.sql.Timestamp;
+
+@Entity
+@Table(name = "WORKFLOW_NODE_DETAIL")
+public class WorkflowNodeDetail {
+    @Column(name = "EXPERIMENT_ID")
+    private String expId;
+    @Id
+    @Column(name = "NODE_INSTANCE_ID")
+    private String nodeId;
+    @Column(name = "CREATION_TIME")
+    private Timestamp creationTime;
+    @Column(name = "NODE_NAME")
+    private String nodeName;
+
+    @ManyToOne(cascade= CascadeType.MERGE)
+    @JoinColumn(name = "EXPERIMENT_ID")
+    private Experiment experiment;
+
+    public String getExpId() {
+        return expId;
+    }
+
+    public void setExpId(String expId) {
+        this.expId = expId;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public Timestamp getCreationTime() {
+        return creationTime;
+    }
+
+    public void setCreationTime(Timestamp creationTime) {
+        this.creationTime = creationTime;
+    }
+
+    public String getNodeName() {
+        return nodeName;
+    }
+
+    public void setNodeName(String nodeName) {
+        this.nodeName = nodeName;
+    }
+
+    public Experiment getExperiment() {
+        return experiment;
+    }
+
+    public void setExperiment(Experiment experiment) {
+        this.experiment = experiment;
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Workflow_Data.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Workflow_Data.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Workflow_Data.java
deleted file mode 100644
index 54af452..0000000
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/model/Workflow_Data.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.persistance.registry.jpa.model;
-
-import java.sql.Timestamp;
-
-import javax.persistence.*;
-
-@Entity
-@Table(name ="WORKFLOW_DATA")
-public class Workflow_Data {
-
-	@ManyToOne(cascade = CascadeType.PERSIST)
-	@JoinColumn(name="EXPERIMENT_ID")
-    private Experiment_Metadata experiment_metadata;
-
-	@Id
-    @Column(name = "WORKFLOW_INSTANCE_ID")
-	private String workflow_instanceID;
-    @Column(name = "TEMPLATE_NAME")
-    private String template_name;
-    @Column(name = "STATUS")
-    private String status;
-    @Column(name = "START_TIME")
-    private Timestamp start_time;
-    @Column(name = "LAST_UPDATE_TIME")
-    private Timestamp last_update_time;
-
-
-	public String getWorkflow_instanceID() {
-		return workflow_instanceID;
-	}
-
-	public void setWorkflow_instanceID(String workflow_instanceID) {
-		this.workflow_instanceID = workflow_instanceID;
-	}
-
-	public String getTemplate_name() {
-		return template_name;
-	}
-
-	public void setTemplate_name(String template_name) {
-		this.template_name = template_name;
-	}
-
-	public String getStatus() {
-		return status;
-	}
-
-	public void setStatus(String status) {
-		this.status = status;
-	}
-
-	public Timestamp getStart_time() {
-		return start_time;
-	}
-
-	public void setStart_time(Timestamp start_time) {
-		this.start_time = start_time;
-	}
-
-	public Timestamp getLast_update_time() {
-		return last_update_time;
-	}
-
-	public void setLast_update_time(Timestamp last_update_time) {
-		this.last_update_time = last_update_time;
-	}
-
-    public Experiment_Metadata getExperiment_data() {
-        return experiment_metadata;
-    }
-
-    public void setExperiment_data(Experiment_Metadata experiment_data) {
-        this.experiment_metadata = experiment_data;
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExecutionErrorResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExecutionErrorResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExecutionErrorResource.java
index 8ffb241..35c0738 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExecutionErrorResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExecutionErrorResource.java
@@ -1,217 +1,217 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import java.sql.Timestamp;
-import java.util.List;
-
-import javax.persistence.EntityManager;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.Execution_Error;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
-import org.apache.airavata.persistance.registry.jpa.model.Workflow_Data;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ExecutionErrorResource extends AbstractResource {
-    private final static Logger logger = LoggerFactory.getLogger(ExecutionErrorResource.class);
-    private ExperimentMetadataResource metadataResource;
-    private WorkflowDataResource workflowDataResource;
-    private String nodeID;
-    private String gfacJobID;
-    private String sourceType;
-    private Timestamp errorTime;
-    private String errorMsg;
-    private String errorDes;
-    private String errorCode;
-    private int errorID;
-    private String errorReporter;
-    private String errorLocation;
-    private String actionTaken;
-    private int errorReference;
-
-    @Override
-    public Resource create(ResourceType type) {
-        logger.error("Unsupported resource type for node error resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void remove(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for node error resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public Resource get(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for node error resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public List<Resource> get(ResourceType type) {
-        logger.error("Unsupported resource type for node error resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-
-        Execution_Error execution_error = new Execution_Error();
-        execution_error.setNode_id(nodeID);
-        Experiment_Metadata experiment_data = em.find(Experiment_Metadata.class, metadataResource.getExpID());
-        execution_error.setExperiment_Metadata(experiment_data);
-        Workflow_Data workflow_data = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
-        execution_error.setExperiment_ID(experiment_data.getExperiment_id());
-        execution_error.setWorkflow_Data(workflow_data);
-        execution_error.setWorkflow_instanceID(workflow_data.getWorkflow_instanceID());
-        execution_error.setError_code(errorCode);
-        execution_error.setError_date(errorTime);
-        execution_error.setError_des(errorDes);
-        execution_error.setError_msg(errorMsg);
-        execution_error.setSource_type(sourceType);
-        execution_error.setGfacJobID(gfacJobID);
-        em.persist(execution_error);
-        errorID = execution_error.getError_id();
-        // System.out.println("Error ID : " + errorID);
-        em.getTransaction().commit();
-        em.close();
-
-    }
-
-    public ExperimentMetadataResource getMetadataResource() {
-        return metadataResource;
-    }
-
-    public WorkflowDataResource getWorkflowDataResource() {
-        return workflowDataResource;
-    }
-
-    public String getNodeID() {
-        return nodeID;
-    }
-
-    public String getGfacJobID() {
-        return gfacJobID;
-    }
-
-    public String getSourceType() {
-        return sourceType;
-    }
-
-    public Timestamp getErrorTime() {
-        return errorTime;
-    }
-
-    public String getErrorMsg() {
-        return errorMsg;
-    }
-
-    public String getErrorDes() {
-        return errorDes;
-    }
-
-    public String getErrorCode() {
-        return errorCode;
-    }
-
-    public void setMetadataResource(ExperimentMetadataResource metadataResource) {
-        this.metadataResource = metadataResource;
-    }
-
-    public void setWorkflowDataResource(WorkflowDataResource workflowDataResource) {
-        this.workflowDataResource = workflowDataResource;
-    }
-
-    public void setNodeID(String nodeID) {
-        this.nodeID = nodeID;
-    }
-
-    public void setGfacJobID(String gfacJobID) {
-        this.gfacJobID = gfacJobID;
-    }
-
-    public void setSourceType(String sourceType) {
-        this.sourceType = sourceType;
-    }
-
-    public void setErrorTime(Timestamp errorTime) {
-        this.errorTime = errorTime;
-    }
-
-    public void setErrorMsg(String errorMsg) {
-        this.errorMsg = errorMsg;
-    }
-
-    public void setErrorDes(String errorDes) {
-        this.errorDes = errorDes;
-    }
-
-    public void setErrorCode(String errorCode) {
-        this.errorCode = errorCode;
-    }
-
-    public int getErrorID() {
-        return errorID;
-    }
-
-    public void setErrorID(int errorID) {
-        this.errorID = errorID;
-    }
-
-    public String getErrorReporter() {
-        return errorReporter;
-    }
-
-    public String getErrorLocation() {
-        return errorLocation;
-    }
-
-    public String getActionTaken() {
-        return actionTaken;
-    }
-
-    public void setErrorReporter(String errorReporter) {
-        this.errorReporter = errorReporter;
-    }
-
-    public void setErrorLocation(String errorLocation) {
-        this.errorLocation = errorLocation;
-    }
-
-    public void setActionTaken(String actionTaken) {
-        this.actionTaken = actionTaken;
-    }
-
-    public int getErrorReference() {
-        return errorReference;
-    }
-
-    public void setErrorReference(int errorReference) {
-        this.errorReference = errorReference;
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import java.sql.Timestamp;
+//import java.util.List;
+//
+//import javax.persistence.EntityManager;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.Execution_Error;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
+//import org.apache.airavata.persistance.registry.jpa.model.Workflow_Data;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//public class ExecutionErrorResource extends AbstractResource {
+//    private final static Logger logger = LoggerFactory.getLogger(ExecutionErrorResource.class);
+//    private ExperimentMetadataResource metadataResource;
+//    private WorkflowDataResource workflowDataResource;
+//    private String nodeID;
+//    private String gfacJobID;
+//    private String sourceType;
+//    private Timestamp errorTime;
+//    private String errorMsg;
+//    private String errorDes;
+//    private String errorCode;
+//    private int errorID;
+//    private String errorReporter;
+//    private String errorLocation;
+//    private String actionTaken;
+//    private int errorReference;
+//
+//    @Override
+//    public Resource create(ResourceType type) {
+//        logger.error("Unsupported resource type for node error resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public void remove(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for node error resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public Resource get(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for node error resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public List<Resource> get(ResourceType type) {
+//        logger.error("Unsupported resource type for node error resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    @Override
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//
+//        Execution_Error execution_error = new Execution_Error();
+//        execution_error.setNode_id(nodeID);
+//        Experiment_Metadata experiment_data = em.find(Experiment_Metadata.class, metadataResource.getExpID());
+//        execution_error.setExperiment_Metadata(experiment_data);
+//        Workflow_Data workflow_data = em.find(Workflow_Data.class, workflowDataResource.getWorkflowInstanceID());
+//        execution_error.setExperiment_ID(experiment_data.getExperiment_id());
+//        execution_error.setWorkflow_Data(workflow_data);
+//        execution_error.setWorkflow_instanceID(workflow_data.getWorkflow_instanceID());
+//        execution_error.setError_code(errorCode);
+//        execution_error.setError_date(errorTime);
+//        execution_error.setError_des(errorDes);
+//        execution_error.setError_msg(errorMsg);
+//        execution_error.setSource_type(sourceType);
+//        execution_error.setGfacJobID(gfacJobID);
+//        em.persist(execution_error);
+//        errorID = execution_error.getError_id();
+//        // System.out.println("Error ID : " + errorID);
+//        em.getTransaction().commit();
+//        em.close();
+//
+//    }
+//
+//    public ExperimentMetadataResource getMetadataResource() {
+//        return metadataResource;
+//    }
+//
+//    public WorkflowDataResource getWorkflowDataResource() {
+//        return workflowDataResource;
+//    }
+//
+//    public String getNodeID() {
+//        return nodeID;
+//    }
+//
+//    public String getGfacJobID() {
+//        return gfacJobID;
+//    }
+//
+//    public String getSourceType() {
+//        return sourceType;
+//    }
+//
+//    public Timestamp getErrorTime() {
+//        return errorTime;
+//    }
+//
+//    public String getErrorMsg() {
+//        return errorMsg;
+//    }
+//
+//    public String getErrorDes() {
+//        return errorDes;
+//    }
+//
+//    public String getErrorCode() {
+//        return errorCode;
+//    }
+//
+//    public void setMetadataResource(ExperimentMetadataResource metadataResource) {
+//        this.metadataResource = metadataResource;
+//    }
+//
+//    public void setWorkflowDataResource(WorkflowDataResource workflowDataResource) {
+//        this.workflowDataResource = workflowDataResource;
+//    }
+//
+//    public void setNodeID(String nodeID) {
+//        this.nodeID = nodeID;
+//    }
+//
+//    public void setGfacJobID(String gfacJobID) {
+//        this.gfacJobID = gfacJobID;
+//    }
+//
+//    public void setSourceType(String sourceType) {
+//        this.sourceType = sourceType;
+//    }
+//
+//    public void setErrorTime(Timestamp errorTime) {
+//        this.errorTime = errorTime;
+//    }
+//
+//    public void setErrorMsg(String errorMsg) {
+//        this.errorMsg = errorMsg;
+//    }
+//
+//    public void setErrorDes(String errorDes) {
+//        this.errorDes = errorDes;
+//    }
+//
+//    public void setErrorCode(String errorCode) {
+//        this.errorCode = errorCode;
+//    }
+//
+//    public int getErrorID() {
+//        return errorID;
+//    }
+//
+//    public void setErrorID(int errorID) {
+//        this.errorID = errorID;
+//    }
+//
+//    public String getErrorReporter() {
+//        return errorReporter;
+//    }
+//
+//    public String getErrorLocation() {
+//        return errorLocation;
+//    }
+//
+//    public String getActionTaken() {
+//        return actionTaken;
+//    }
+//
+//    public void setErrorReporter(String errorReporter) {
+//        this.errorReporter = errorReporter;
+//    }
+//
+//    public void setErrorLocation(String errorLocation) {
+//        this.errorLocation = errorLocation;
+//    }
+//
+//    public void setActionTaken(String actionTaken) {
+//        this.actionTaken = actionTaken;
+//    }
+//
+//    public int getErrorReference() {
+//        return errorReference;
+//    }
+//
+//    public void setErrorReference(int errorReference) {
+//        this.errorReference = errorReference;
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentConfigDataResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentConfigDataResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentConfigDataResource.java
index 07057b8..d13008f 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentConfigDataResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentConfigDataResource.java
@@ -1,389 +1,389 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Configuration_Data;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import java.sql.Timestamp;
-import java.util.List;
-
-public class ExperimentConfigDataResource extends AbstractResource {
-    private static final Logger logger = LoggerFactory.getLogger(ExperimentConfigDataResource.class);
-    private ExperimentMetadataResource exMetadata;
-    private String expID;
-    private String resourceHostID;
-    private int cpuCount;
-    private int nodeCount;
-    private int numberOfThreads;
-    private String queueName;
-    private int wallTimeLimit;
-    private Timestamp jobStartTime;
-    private int physicalMemory;
-    private String projectAccount;
-    private boolean airavataAutoSchedule;
-    private boolean overrideManualSchedule;
-    private String workingDir;
-    private boolean stageInputsToWDir;
-    private String outputDataDir;
-    private String dataRegURL;
-    private boolean persistOutputData;
-    private boolean cleanAfterJob;
-    private String applicationID;
-    private String applicationVersion;
-    private String workflowTemplateId;
-    private String workflowTemplateVersion;
-    private String workingDirParent;
-    private String startExecutionAt;
-    private String executeBefore;
-    private int numberOfRetries;
-
-    private byte[] request;
-
-    public String getExpID() {
-        return expID;
-    }
-
-    public void setExpID(String expID) {
-        this.expID = expID;
-    }
-
-    public ExperimentMetadataResource getExMetadata() {
-        return exMetadata;
-    }
-
-    public void setExMetadata(ExperimentMetadataResource exMetadata) {
-        this.exMetadata = exMetadata;
-    }
-
-    public String getResourceHostID() {
-        return resourceHostID;
-    }
-
-    public void setResourceHostID(String resourceHostID) {
-        this.resourceHostID = resourceHostID;
-    }
-
-    public int getCpuCount() {
-        return cpuCount;
-    }
-
-    public void setCpuCount(int cpuCount) {
-        this.cpuCount = cpuCount;
-    }
-
-    public int getNodeCount() {
-        return nodeCount;
-    }
-
-    public void setNodeCount(int nodeCount) {
-        this.nodeCount = nodeCount;
-    }
-
-    public int getNumberOfThreads() {
-        return numberOfThreads;
-    }
-
-    public void setNumberOfThreads(int numberOfThreads) {
-        this.numberOfThreads = numberOfThreads;
-    }
-
-    public String getQueueName() {
-        return queueName;
-    }
-
-    public void setQueueName(String queueName) {
-        this.queueName = queueName;
-    }
-
-    public int getWallTimeLimit() {
-        return wallTimeLimit;
-    }
-
-    public void setWallTimeLimit(int wallTimeLimit) {
-        this.wallTimeLimit = wallTimeLimit;
-    }
-
-    public Timestamp getJobStartTime() {
-        return jobStartTime;
-    }
-
-    public void setJobStartTime(Timestamp jobStartTime) {
-        this.jobStartTime = jobStartTime;
-    }
-
-    public int getPhysicalMemory() {
-        return physicalMemory;
-    }
-
-    public void setPhysicalMemory(int physicalMemory) {
-        this.physicalMemory = physicalMemory;
-    }
-
-    public String getProjectAccount() {
-        return projectAccount;
-    }
-
-    public void setProjectAccount(String projectAccount) {
-        this.projectAccount = projectAccount;
-    }
-
-    public boolean isAiravataAutoSchedule() {
-        return airavataAutoSchedule;
-    }
-
-    public void setAiravataAutoSchedule(boolean airavataAutoSchedule) {
-        this.airavataAutoSchedule = airavataAutoSchedule;
-    }
-
-    public boolean isOverrideManualSchedule() {
-        return overrideManualSchedule;
-    }
-
-    public void setOverrideManualSchedule(boolean overrideManualSchedule) {
-        this.overrideManualSchedule = overrideManualSchedule;
-    }
-
-    public String getWorkingDir() {
-        return workingDir;
-    }
-
-    public void setWorkingDir(String workingDir) {
-        this.workingDir = workingDir;
-    }
-
-    public boolean isStageInputsToWDir() {
-        return stageInputsToWDir;
-    }
-
-    public void setStageInputsToWDir(boolean stageInputsToWDir) {
-        this.stageInputsToWDir = stageInputsToWDir;
-    }
-
-    public String getOutputDataDir() {
-        return outputDataDir;
-    }
-
-    public void setOutputDataDir(String outputDataDir) {
-        this.outputDataDir = outputDataDir;
-    }
-
-    public String getDataRegURL() {
-        return dataRegURL;
-    }
-
-    public void setDataRegURL(String dataRegURL) {
-        this.dataRegURL = dataRegURL;
-    }
-
-    public boolean isPersistOutputData() {
-        return persistOutputData;
-    }
-
-    public void setPersistOutputData(boolean persistOutputData) {
-        this.persistOutputData = persistOutputData;
-    }
-
-    public boolean isCleanAfterJob() {
-        return cleanAfterJob;
-    }
-
-    public void setCleanAfterJob(boolean cleanAfterJob) {
-        this.cleanAfterJob = cleanAfterJob;
-    }
-
-    public byte[] getRequest() {
-        return request;
-    }
-
-    public void setRequest(byte[] request) {
-        this.request = request;
-    }
-
-    public static Logger getLogger() {
-        return logger;
-    }
-
-    public String getApplicationID() {
-        return applicationID;
-    }
-
-    public void setApplicationID(String applicationID) {
-        this.applicationID = applicationID;
-    }
-
-    public String getApplicationVersion() {
-        return applicationVersion;
-    }
-
-    public void setApplicationVersion(String applicationVersion) {
-        this.applicationVersion = applicationVersion;
-    }
-
-    public String getWorkflowTemplateId() {
-        return workflowTemplateId;
-    }
-
-    public void setWorkflowTemplateId(String workflowTemplateId) {
-        this.workflowTemplateId = workflowTemplateId;
-    }
-
-    public String getWorkflowTemplateVersion() {
-        return workflowTemplateVersion;
-    }
-
-    public void setWorkflowTemplateVersion(String workflowTemplateVersion) {
-        this.workflowTemplateVersion = workflowTemplateVersion;
-    }
-
-    public String getWorkingDirParent() {
-        return workingDirParent;
-    }
-
-    public void setWorkingDirParent(String workingDirParent) {
-        this.workingDirParent = workingDirParent;
-    }
-
-    public String getStartExecutionAt() {
-        return startExecutionAt;
-    }
-
-    public void setStartExecutionAt(String startExecutionAt) {
-        this.startExecutionAt = startExecutionAt;
-    }
-
-    public String getExecuteBefore() {
-        return executeBefore;
-    }
-
-    public void setExecuteBefore(String executeBefore) {
-        this.executeBefore = executeBefore;
-    }
-
-    public int getNumberOfRetries() {
-        return numberOfRetries;
-    }
-
-    public void setNumberOfRetries(int numberOfRetries) {
-        this.numberOfRetries = numberOfRetries;
-    }
-
-    public Resource create(ResourceType type) {
-        logger.error("Unsupported resource type for experiment config data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public void remove(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for experiment config data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();    }
-
-    public Resource get(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for experiment config data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();    }
-
-    public List<Resource> get(ResourceType type) {
-        logger.error("Unsupported resource type for experiment config data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();    }
-
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        Experiment_Configuration_Data existingConfig = em.find(Experiment_Configuration_Data.class, exMetadata.getExpID());
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Experiment_Configuration_Data exconfig = new Experiment_Configuration_Data();
-        exconfig.setAiravata_auto_schedule(isAiravataAutoSchedule());
-        exconfig.setClean_after_job(cleanAfterJob);
-        exconfig.setComputational_project_account(projectAccount);
-        exconfig.setData_reg_url(dataRegURL);
-        exconfig.setExperiment_config_data(request);
-        Experiment_Metadata metadata = em.find(Experiment_Metadata.class, exMetadata.getExpID());
-        exconfig.setExperiment_metadata(metadata);
-        exconfig.setExperiment_id(metadata.getExperiment_id());
-        exconfig.setJob_start_time(jobStartTime);
-        exconfig.setNode_count(nodeCount);
-        exconfig.setNumber_of_threads(numberOfThreads);
-        exconfig.setOutput_data_dir(outputDataDir);
-        exconfig.setOverride_manual_schedule(overrideManualSchedule);
-        exconfig.setPersist_output_data(persistOutputData);
-        exconfig.setQueue_name(queueName);
-        exconfig.setResource_host_id(resourceHostID);
-        exconfig.setStage_input_files_to_working_dir(stageInputsToWDir);
-        exconfig.setTotal_cpu_count(cpuCount);
-        exconfig.setTotal_physical_memory(physicalMemory);
-        exconfig.setWalltime_limit(wallTimeLimit);
-        exconfig.setUnique_working_dir(workingDir);
-        exconfig.setWorking_dir_parent(workingDirParent);
-        exconfig.setApplication_id(applicationID);
-        exconfig.setApplication_version(applicationVersion);
-        exconfig.setWorkflow_template_id(workflowTemplateId);
-        exconfig.setWorkflow_template_version(workflowTemplateVersion);
-        exconfig.setStart_execution_at(startExecutionAt);
-        exconfig.setExecute_before(executeBefore);
-        exconfig.setNumber_of_retries(numberOfRetries);
-
-        if (existingConfig != null){
-            existingConfig.setAiravata_auto_schedule(isAiravataAutoSchedule());
-            existingConfig.setClean_after_job(cleanAfterJob);
-            existingConfig.setComputational_project_account(projectAccount);
-            existingConfig.setData_reg_url(dataRegURL);
-            existingConfig.setExperiment_config_data(request);
-            existingConfig.setExperiment_metadata(metadata);
-            existingConfig.setExperiment_id(metadata.getExperiment_id());
-            existingConfig.setJob_start_time(jobStartTime);
-            existingConfig.setNode_count(nodeCount);
-            existingConfig.setNumber_of_threads(numberOfThreads);
-            existingConfig.setOutput_data_dir(outputDataDir);
-            existingConfig.setOverride_manual_schedule(overrideManualSchedule);
-            existingConfig.setPersist_output_data(persistOutputData);
-            existingConfig.setQueue_name(queueName);
-            existingConfig.setResource_host_id(resourceHostID);
-            existingConfig.setStage_input_files_to_working_dir(stageInputsToWDir);
-            existingConfig.setTotal_cpu_count(cpuCount);
-            existingConfig.setTotal_physical_memory(physicalMemory);
-            existingConfig.setWalltime_limit(wallTimeLimit);
-            existingConfig.setUnique_working_dir(workingDir);
-            existingConfig.setUnique_working_dir(workingDir);
-            existingConfig.setWorking_dir_parent(workingDirParent);
-            existingConfig.setApplication_id(applicationID);
-            existingConfig.setApplication_version(applicationVersion);
-            existingConfig.setWorkflow_template_id(workflowTemplateId);
-            existingConfig.setWorkflow_template_version(workflowTemplateVersion);
-            existingConfig.setStart_execution_at(startExecutionAt);
-            existingConfig.setExecute_before(executeBefore);
-            existingConfig.setNumber_of_retries(numberOfRetries);
-            exconfig = em.merge(existingConfig);
-        }
-        else {
-           em.persist(exconfig);
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Configuration_Data;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import java.sql.Timestamp;
+//import java.util.List;
+//
+//public class ExperimentConfigDataResource extends AbstractResource {
+//    private static final Logger logger = LoggerFactory.getLogger(ExperimentConfigDataResource.class);
+//    private ExperimentMetadataResource exMetadata;
+//    private String expID;
+//    private String resourceHostID;
+//    private int cpuCount;
+//    private int nodeCount;
+//    private int numberOfThreads;
+//    private String queueName;
+//    private int wallTimeLimit;
+//    private Timestamp jobStartTime;
+//    private int physicalMemory;
+//    private String projectAccount;
+//    private boolean airavataAutoSchedule;
+//    private boolean overrideManualSchedule;
+//    private String workingDir;
+//    private boolean stageInputsToWDir;
+//    private String outputDataDir;
+//    private String dataRegURL;
+//    private boolean persistOutputData;
+//    private boolean cleanAfterJob;
+//    private String applicationID;
+//    private String applicationVersion;
+//    private String workflowTemplateId;
+//    private String workflowTemplateVersion;
+//    private String workingDirParent;
+//    private String startExecutionAt;
+//    private String executeBefore;
+//    private int numberOfRetries;
+//
+//    private byte[] request;
+//
+//    public String getExpID() {
+//        return expID;
+//    }
+//
+//    public void setExpID(String expID) {
+//        this.expID = expID;
+//    }
+//
+//    public ExperimentMetadataResource getExMetadata() {
+//        return exMetadata;
+//    }
+//
+//    public void setExMetadata(ExperimentMetadataResource exMetadata) {
+//        this.exMetadata = exMetadata;
+//    }
+//
+//    public String getResourceHostID() {
+//        return resourceHostID;
+//    }
+//
+//    public void setResourceHostID(String resourceHostID) {
+//        this.resourceHostID = resourceHostID;
+//    }
+//
+//    public int getCpuCount() {
+//        return cpuCount;
+//    }
+//
+//    public void setCpuCount(int cpuCount) {
+//        this.cpuCount = cpuCount;
+//    }
+//
+//    public int getNodeCount() {
+//        return nodeCount;
+//    }
+//
+//    public void setNodeCount(int nodeCount) {
+//        this.nodeCount = nodeCount;
+//    }
+//
+//    public int getNumberOfThreads() {
+//        return numberOfThreads;
+//    }
+//
+//    public void setNumberOfThreads(int numberOfThreads) {
+//        this.numberOfThreads = numberOfThreads;
+//    }
+//
+//    public String getQueueName() {
+//        return queueName;
+//    }
+//
+//    public void setQueueName(String queueName) {
+//        this.queueName = queueName;
+//    }
+//
+//    public int getWallTimeLimit() {
+//        return wallTimeLimit;
+//    }
+//
+//    public void setWallTimeLimit(int wallTimeLimit) {
+//        this.wallTimeLimit = wallTimeLimit;
+//    }
+//
+//    public Timestamp getJobStartTime() {
+//        return jobStartTime;
+//    }
+//
+//    public void setJobStartTime(Timestamp jobStartTime) {
+//        this.jobStartTime = jobStartTime;
+//    }
+//
+//    public int getPhysicalMemory() {
+//        return physicalMemory;
+//    }
+//
+//    public void setPhysicalMemory(int physicalMemory) {
+//        this.physicalMemory = physicalMemory;
+//    }
+//
+//    public String getProjectAccount() {
+//        return projectAccount;
+//    }
+//
+//    public void setProjectAccount(String projectAccount) {
+//        this.projectAccount = projectAccount;
+//    }
+//
+//    public boolean isAiravataAutoSchedule() {
+//        return airavataAutoSchedule;
+//    }
+//
+//    public void setAiravataAutoSchedule(boolean airavataAutoSchedule) {
+//        this.airavataAutoSchedule = airavataAutoSchedule;
+//    }
+//
+//    public boolean isOverrideManualSchedule() {
+//        return overrideManualSchedule;
+//    }
+//
+//    public void setOverrideManualSchedule(boolean overrideManualSchedule) {
+//        this.overrideManualSchedule = overrideManualSchedule;
+//    }
+//
+//    public String getWorkingDir() {
+//        return workingDir;
+//    }
+//
+//    public void setWorkingDir(String workingDir) {
+//        this.workingDir = workingDir;
+//    }
+//
+//    public boolean isStageInputsToWDir() {
+//        return stageInputsToWDir;
+//    }
+//
+//    public void setStageInputsToWDir(boolean stageInputsToWDir) {
+//        this.stageInputsToWDir = stageInputsToWDir;
+//    }
+//
+//    public String getOutputDataDir() {
+//        return outputDataDir;
+//    }
+//
+//    public void setOutputDataDir(String outputDataDir) {
+//        this.outputDataDir = outputDataDir;
+//    }
+//
+//    public String getDataRegURL() {
+//        return dataRegURL;
+//    }
+//
+//    public void setDataRegURL(String dataRegURL) {
+//        this.dataRegURL = dataRegURL;
+//    }
+//
+//    public boolean isPersistOutputData() {
+//        return persistOutputData;
+//    }
+//
+//    public void setPersistOutputData(boolean persistOutputData) {
+//        this.persistOutputData = persistOutputData;
+//    }
+//
+//    public boolean isCleanAfterJob() {
+//        return cleanAfterJob;
+//    }
+//
+//    public void setCleanAfterJob(boolean cleanAfterJob) {
+//        this.cleanAfterJob = cleanAfterJob;
+//    }
+//
+//    public byte[] getRequest() {
+//        return request;
+//    }
+//
+//    public void setRequest(byte[] request) {
+//        this.request = request;
+//    }
+//
+//    public static Logger getLogger() {
+//        return logger;
+//    }
+//
+//    public String getApplicationID() {
+//        return applicationID;
+//    }
+//
+//    public void setApplicationID(String applicationID) {
+//        this.applicationID = applicationID;
+//    }
+//
+//    public String getApplicationVersion() {
+//        return applicationVersion;
+//    }
+//
+//    public void setApplicationVersion(String applicationVersion) {
+//        this.applicationVersion = applicationVersion;
+//    }
+//
+//    public String getWorkflowTemplateId() {
+//        return workflowTemplateId;
+//    }
+//
+//    public void setWorkflowTemplateId(String workflowTemplateId) {
+//        this.workflowTemplateId = workflowTemplateId;
+//    }
+//
+//    public String getWorkflowTemplateVersion() {
+//        return workflowTemplateVersion;
+//    }
+//
+//    public void setWorkflowTemplateVersion(String workflowTemplateVersion) {
+//        this.workflowTemplateVersion = workflowTemplateVersion;
+//    }
+//
+//    public String getWorkingDirParent() {
+//        return workingDirParent;
+//    }
+//
+//    public void setWorkingDirParent(String workingDirParent) {
+//        this.workingDirParent = workingDirParent;
+//    }
+//
+//    public String getStartExecutionAt() {
+//        return startExecutionAt;
+//    }
+//
+//    public void setStartExecutionAt(String startExecutionAt) {
+//        this.startExecutionAt = startExecutionAt;
+//    }
+//
+//    public String getExecuteBefore() {
+//        return executeBefore;
+//    }
+//
+//    public void setExecuteBefore(String executeBefore) {
+//        this.executeBefore = executeBefore;
+//    }
+//
+//    public int getNumberOfRetries() {
+//        return numberOfRetries;
+//    }
+//
+//    public void setNumberOfRetries(int numberOfRetries) {
+//        this.numberOfRetries = numberOfRetries;
+//    }
+//
+//    public Resource create(ResourceType type) {
+//        logger.error("Unsupported resource type for experiment config data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void remove(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for experiment config data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();    }
+//
+//    public Resource get(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for experiment config data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();    }
+//
+//    public List<Resource> get(ResourceType type) {
+//        logger.error("Unsupported resource type for experiment config data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();    }
+//
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        Experiment_Configuration_Data existingConfig = em.find(Experiment_Configuration_Data.class, exMetadata.getExpID());
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Experiment_Configuration_Data exconfig = new Experiment_Configuration_Data();
+//        exconfig.setAiravata_auto_schedule(isAiravataAutoSchedule());
+//        exconfig.setClean_after_job(cleanAfterJob);
+//        exconfig.setComputational_project_account(projectAccount);
+//        exconfig.setData_reg_url(dataRegURL);
+//        exconfig.setExperiment_config_data(request);
+//        Experiment_Metadata metadata = em.find(Experiment_Metadata.class, exMetadata.getExpID());
+//        exconfig.setExperiment_metadata(metadata);
+//        exconfig.setExperiment_id(metadata.getExperiment_id());
+//        exconfig.setJob_start_time(jobStartTime);
+//        exconfig.setNode_count(nodeCount);
+//        exconfig.setNumber_of_threads(numberOfThreads);
+//        exconfig.setOutput_data_dir(outputDataDir);
+//        exconfig.setOverride_manual_schedule(overrideManualSchedule);
+//        exconfig.setPersist_output_data(persistOutputData);
+//        exconfig.setQueue_name(queueName);
+//        exconfig.setResource_host_id(resourceHostID);
+//        exconfig.setStage_input_files_to_working_dir(stageInputsToWDir);
+//        exconfig.setTotal_cpu_count(cpuCount);
+//        exconfig.setTotal_physical_memory(physicalMemory);
+//        exconfig.setWalltime_limit(wallTimeLimit);
+//        exconfig.setUnique_working_dir(workingDir);
+//        exconfig.setWorking_dir_parent(workingDirParent);
+//        exconfig.setApplication_id(applicationID);
+//        exconfig.setApplication_version(applicationVersion);
+//        exconfig.setWorkflow_template_id(workflowTemplateId);
+//        exconfig.setWorkflow_template_version(workflowTemplateVersion);
+//        exconfig.setStart_execution_at(startExecutionAt);
+//        exconfig.setExecute_before(executeBefore);
+//        exconfig.setNumber_of_retries(numberOfRetries);
+//
+//        if (existingConfig != null){
+//            existingConfig.setAiravata_auto_schedule(isAiravataAutoSchedule());
+//            existingConfig.setClean_after_job(cleanAfterJob);
+//            existingConfig.setComputational_project_account(projectAccount);
+//            existingConfig.setData_reg_url(dataRegURL);
+//            existingConfig.setExperiment_config_data(request);
+//            existingConfig.setExperiment_metadata(metadata);
+//            existingConfig.setExperiment_id(metadata.getExperiment_id());
+//            existingConfig.setJob_start_time(jobStartTime);
+//            existingConfig.setNode_count(nodeCount);
+//            existingConfig.setNumber_of_threads(numberOfThreads);
+//            existingConfig.setOutput_data_dir(outputDataDir);
+//            existingConfig.setOverride_manual_schedule(overrideManualSchedule);
+//            existingConfig.setPersist_output_data(persistOutputData);
+//            existingConfig.setQueue_name(queueName);
+//            existingConfig.setResource_host_id(resourceHostID);
+//            existingConfig.setStage_input_files_to_working_dir(stageInputsToWDir);
+//            existingConfig.setTotal_cpu_count(cpuCount);
+//            existingConfig.setTotal_physical_memory(physicalMemory);
+//            existingConfig.setWalltime_limit(wallTimeLimit);
+//            existingConfig.setUnique_working_dir(workingDir);
+//            existingConfig.setUnique_working_dir(workingDir);
+//            existingConfig.setWorking_dir_parent(workingDirParent);
+//            existingConfig.setApplication_id(applicationID);
+//            existingConfig.setApplication_version(applicationVersion);
+//            existingConfig.setWorkflow_template_id(workflowTemplateId);
+//            existingConfig.setWorkflow_template_version(workflowTemplateVersion);
+//            existingConfig.setStart_execution_at(startExecutionAt);
+//            existingConfig.setExecute_before(executeBefore);
+//            existingConfig.setNumber_of_retries(numberOfRetries);
+//            exconfig = em.merge(existingConfig);
+//        }
+//        else {
+//           em.persist(exconfig);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/a0c1cbde/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentInputResource.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentInputResource.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentInputResource.java
index 36c872b..6c7aa35 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentInputResource.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/resources/ExperimentInputResource.java
@@ -1,113 +1,113 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.persistance.registry.jpa.resources;
-
-import org.apache.airavata.persistance.registry.jpa.Resource;
-import org.apache.airavata.persistance.registry.jpa.ResourceType;
-import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Input;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Input_PK;
-import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.persistence.EntityManager;
-import java.util.List;
-
-public class ExperimentInputResource extends AbstractResource {
-    private static final Logger logger = LoggerFactory.getLogger(ExperimentInputResource.class);
-
-    private ExperimentMetadataResource experimentMetadataResource;
-    private String experimentKey;
-    private String value;
-
-    public ExperimentMetadataResource getExperimentMetadataResource() {
-        return experimentMetadataResource;
-    }
-
-    public void setExperimentMetadataResource(ExperimentMetadataResource experimentMetadataResource) {
-        this.experimentMetadataResource = experimentMetadataResource;
-    }
-
-    public String getExperimentKey() {
-        return experimentKey;
-    }
-
-    public void setExperimentKey(String experimentKey) {
-        this.experimentKey = experimentKey;
-    }
-
-    public String getValue() {
-        return value;
-    }
-
-    public void setValue(String value) {
-        this.value = value;
-    }
-
-    public Resource create(ResourceType type) {
-        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public void remove(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public Resource get(ResourceType type, Object name) {
-        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public List<Resource> get(ResourceType type) {
-        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
-        throw new UnsupportedOperationException();
-    }
-
-    public void save() {
-        EntityManager em = ResourceUtils.getEntityManager();
-        Experiment_Input existingInput = em.find(Experiment_Input.class, new Experiment_Input_PK(experimentMetadataResource.getExpID(), experimentKey));
-        em.close();
-
-        em = ResourceUtils.getEntityManager();
-        em.getTransaction().begin();
-        Experiment_Input exInput = new Experiment_Input();
-        exInput.setEx_key(experimentKey);
-        Experiment_Metadata metadata = em.find(Experiment_Metadata.class, experimentMetadataResource.getExpID());
-        exInput.setExperiment_metadata(metadata);
-        exInput.setExperiment_id(metadata.getExperiment_id());
-        exInput.setValue(value);
-
-        if (existingInput != null){
-            existingInput.setEx_key(experimentKey);
-            existingInput.setExperiment_metadata(metadata);
-            existingInput.setExperiment_id(metadata.getExperiment_id());
-            existingInput.setValue(value);
-            exInput = em.merge(existingInput);
-        }else {
-            em.persist(exInput);
-        }
-        em.getTransaction().commit();
-        em.close();
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.persistance.registry.jpa.resources;
+//
+//import org.apache.airavata.persistance.registry.jpa.Resource;
+//import org.apache.airavata.persistance.registry.jpa.ResourceType;
+//import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Input;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Input_PK;
+//import org.apache.airavata.persistance.registry.jpa.model.Experiment_Metadata;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import javax.persistence.EntityManager;
+//import java.util.List;
+//
+//public class ExperimentInputResource extends AbstractResource {
+//    private static final Logger logger = LoggerFactory.getLogger(ExperimentInputResource.class);
+//
+//    private ExperimentMetadataResource experimentMetadataResource;
+//    private String experimentKey;
+//    private String value;
+//
+//    public ExperimentMetadataResource getExperimentMetadataResource() {
+//        return experimentMetadataResource;
+//    }
+//
+//    public void setExperimentMetadataResource(ExperimentMetadataResource experimentMetadataResource) {
+//        this.experimentMetadataResource = experimentMetadataResource;
+//    }
+//
+//    public String getExperimentKey() {
+//        return experimentKey;
+//    }
+//
+//    public void setExperimentKey(String experimentKey) {
+//        this.experimentKey = experimentKey;
+//    }
+//
+//    public String getValue() {
+//        return value;
+//    }
+//
+//    public void setValue(String value) {
+//        this.value = value;
+//    }
+//
+//    public Resource create(ResourceType type) {
+//        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void remove(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public Resource get(ResourceType type, Object name) {
+//        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public List<Resource> get(ResourceType type) {
+//        logger.error("Unsupported resource type for experiment input data resource.", new UnsupportedOperationException());
+//        throw new UnsupportedOperationException();
+//    }
+//
+//    public void save() {
+//        EntityManager em = ResourceUtils.getEntityManager();
+//        Experiment_Input existingInput = em.find(Experiment_Input.class, new Experiment_Input_PK(experimentMetadataResource.getExpID(), experimentKey));
+//        em.close();
+//
+//        em = ResourceUtils.getEntityManager();
+//        em.getTransaction().begin();
+//        Experiment_Input exInput = new Experiment_Input();
+//        exInput.setEx_key(experimentKey);
+//        Experiment_Metadata metadata = em.find(Experiment_Metadata.class, experimentMetadataResource.getExpID());
+//        exInput.setExperiment_metadata(metadata);
+//        exInput.setExperiment_id(metadata.getExperiment_id());
+//        exInput.setValue(value);
+//
+//        if (existingInput != null){
+//            existingInput.setEx_key(experimentKey);
+//            existingInput.setExperiment_metadata(metadata);
+//            existingInput.setExperiment_id(metadata.getExperiment_id());
+//            existingInput.setValue(value);
+//            exInput = em.merge(existingInput);
+//        }else {
+//            em.persist(exInput);
+//        }
+//        em.getTransaction().commit();
+//        em.close();
+//    }
+//}
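
The save() implementations in the resource classes above all share one idiom: look the row up first, then merge when it already exists and persist when it does not, inside a transaction. A condensed, generic sketch of that idiom follows, collapsed into a single EntityManager for brevity; the method, its generic parameters, and any entity passed to it are placeholders for illustration, not classes defined by this commit.

    import javax.persistence.EntityManager;
    import org.apache.airavata.persistance.registry.jpa.ResourceUtils;

    public class UpsertExample {
        // Generic upsert sketch mirroring the find-then-merge-or-persist pattern above.
        // The resource classes copy fields onto the found instance before merging;
        // here a fresh copy with the same primary key is merged instead.
        public static <T> T saveOrUpdate(Class<T> entityClass, Object primaryKey, T freshCopy) {
            EntityManager em = ResourceUtils.getEntityManager();
            em.getTransaction().begin();
            T existing = em.find(entityClass, primaryKey);
            T managed;
            if (existing != null) {
                managed = em.merge(freshCopy);   // row exists: update it
            } else {
                em.persist(freshCopy);           // first save of this row
                managed = freshCopy;
            }
            em.getTransaction().commit();
            em.close();
            return managed;
        }
    }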