Posted to commits@airavata.apache.org by ch...@apache.org on 2015/06/24 17:19:08 UTC

[2/2] airavata git commit: fixing build issues

fixing build issues


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/ab322333
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/ab322333
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/ab322333

Branch: refs/heads/master
Commit: ab3223339fe39a15df9ab54387bc3830ec20114f
Parents: df3fbe6
Author: Chathuri Wimalasena <ch...@apache.org>
Authored: Wed Jun 24 11:19:01 2015 -0400
Committer: Chathuri Wimalasena <ch...@apache.org>
Committed: Wed Jun 24 11:19:01 2015 -0400

----------------------------------------------------------------------
 .../core/validator/JobMetadataValidator.java    |  10 +-
 .../validator/impl/BatchQueueValidator.java     |  15 +-
 .../impl/ExperimentStatusValidator.java         |  12 +-
 .../airavata/orchestrator/cpi/Orchestrator.java |  22 +-
 .../cpi/impl/SimpleOrchestratorImpl.java        | 100 +--
 .../server/OrchestratorServerHandler.java       | 624 +++++++++----------
 .../orchestrator/util/DataModelUtils.java       |   4 +-
 .../OrchestratorServerThreadPoolExecutor.java   |   6 +-
 .../engine/interpretor/WorkflowInterpreter.java |  28 +-
 .../engine/invoker/WorkflowInputUtil.java       |   2 +-
 .../model/component/ComponentDataPort.java      |   2 +-
 .../amazon/InstanceComponentDataPort.java       |   2 +-
 .../component/dynamic/DynamicComponentPort.java |   2 +-
 .../component/ws/WSComponentApplication.java    |   6 +-
 .../ws/WSComponentApplicationParameter.java     |   3 +-
 .../model/component/ws/WSComponentPort.java     |   2 +-
 .../airavata/workflow/model/graph/DataPort.java |   2 +-
 .../airavata/workflow/model/graph/EPRPort.java  |   2 +-
 .../model/graph/amazon/InstanceDataPort.java    |   2 +-
 .../model/graph/dynamic/DynamicPort.java        |   2 +-
 .../model/graph/system/ConstantNode.java        |   2 +-
 .../model/graph/system/DifferedInputNode.java   |   2 +-
 .../model/graph/system/EndBlockNode.java        |   2 +-
 .../model/graph/system/EndDoWhileNode.java      |   2 +-
 .../model/graph/system/EndForEachNode.java      |   2 +-
 .../workflow/model/graph/system/EndifNode.java  |   2 +-
 .../workflow/model/graph/system/OutputNode.java |   2 +-
 .../model/graph/system/ParameterNode.java       |   3 +-
 .../model/graph/system/SystemDataPort.java      |   3 +-
 .../workflow/model/graph/system/SystemNode.java |   3 +-
 .../workflow/model/graph/ws/WSPort.java         |   2 +-
 .../workflow/model/wf/WorkflowInput.java        |   3 +-
 .../core/dag/nodes/WorkflowInputNode.java       |   2 +-
 .../core/dag/nodes/WorkflowInputNodeImpl.java   |   2 +-
 .../core/dag/nodes/WorkflowOutputNode.java      |   2 +-
 .../core/dag/nodes/WorkflowOutputNodeImpl.java  |   2 +-
 .../airavata/workflow/core/dag/port/InPort.java |   2 +-
 .../workflow/core/dag/port/InputPortIml.java    |   2 +-
 .../workflow/core/dag/port/OutPort.java         |   2 +-
 .../workflow/core/dag/port/OutPortImpl.java     |   2 +-
 .../core/parser/AiravataWorkflowParser.java     |  16 +-
 .../core/parser/AiravataWorkflowParserTest.java |   6 +-
 pom.xml                                         |   8 +-
 43 files changed, 449 insertions(+), 473 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/JobMetadataValidator.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/JobMetadataValidator.java b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/JobMetadataValidator.java
index f3f037c..a897a19 100644
--- a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/JobMetadataValidator.java
+++ b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/JobMetadataValidator.java
@@ -22,8 +22,8 @@ package org.apache.airavata.orchestrator.core.validator;
 
 import org.apache.airavata.model.error.ValidationResults;
 import org.apache.airavata.model.experiment.ExperimentModel;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.apache.airavata.model.experiment.WorkflowNodeDetails;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.task.TaskModel;
 
 /**
  * This is the interface to implement a validation logic, users can implement their validation
@@ -38,9 +38,9 @@ public interface JobMetadataValidator {
      * and set the results as failed (false) and return in, orchestrator will wrap them to an Exception and
      * thrown to the client side
      * @param experiment
-     * @param workflowNodeDetail
-     * @param taskID
+     * @param processModel
+     * @param taskModel
      * @return
      */
-    ValidationResults validate(Experiment experiment, WorkflowNodeDetails workflowNodeDetail, TaskDetails taskID);
+    ValidationResults validate(ExperimentModel experiment, ProcessModel processModel, TaskModel taskModel);
 }
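
The validator contract above now takes the new data models (ExperimentModel, ProcessModel, TaskModel) in place of the old Experiment/WorkflowNodeDetails/TaskDetails trio. A minimal sketch of a custom validator written against the new signature is below; the class name and the specific check are illustrative only, and the list helper on ValidationResults is assumed from the Thrift-generated model rather than taken from this commit.

    package org.apache.airavata.orchestrator.core.validator.impl;

    import org.apache.airavata.model.error.ValidationResults;
    import org.apache.airavata.model.error.ValidatorResult;
    import org.apache.airavata.model.experiment.ExperimentModel;
    import org.apache.airavata.model.process.ProcessModel;
    import org.apache.airavata.model.task.TaskModel;
    import org.apache.airavata.orchestrator.core.validator.JobMetadataValidator;

    // Hypothetical validator, shown only to illustrate the new three-argument contract.
    public class ExperimentNameValidator implements JobMetadataValidator {

        public ValidationResults validate(ExperimentModel experiment, ProcessModel processModel, TaskModel taskModel) {
            ValidationResults results = new ValidationResults();
            results.setValidationState(true);
            ValidatorResult check = new ValidatorResult();
            if (experiment.getExperimentName() == null || experiment.getExperimentName().trim().isEmpty()) {
                check.setResult(false);
                check.setErrorDetails("Experiment name must not be empty");
                results.setValidationState(false);
            } else {
                check.setResult(true);
            }
            results.addToValidationResultList(check); // Thrift-generated list helper (assumed name)
            return results;
        }
    }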

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/BatchQueueValidator.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/BatchQueueValidator.java b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/BatchQueueValidator.java
index 46f4360..664fcfe 100644
--- a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/BatchQueueValidator.java
+++ b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/BatchQueueValidator.java
@@ -25,6 +25,9 @@ import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescr
 import org.apache.airavata.model.error.ValidationResults;
 import org.apache.airavata.model.error.ValidatorResult;
 import org.apache.airavata.model.experiment.*;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel;
+import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.orchestrator.core.validator.JobMetadataValidator;
 import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.*;
@@ -51,11 +54,11 @@ public class BatchQueueValidator implements JobMetadataValidator {
         }
     }
 
-    public ValidationResults validate(Experiment experiment, WorkflowNodeDetails workflowNodeDetail, TaskDetails taskID) {
+    public ValidationResults validate(ExperimentModel experiment, ProcessModel processModel, TaskModel taskModel) {
         ValidationResults validationResults = new ValidationResults();
         validationResults.setValidationState(true);
         try {
-            List<ValidatorResult> validatorResultList = validateUserConfiguration(experiment, taskID);
+            List<ValidatorResult> validatorResultList = validateUserConfiguration(experiment, processModel);
             for (ValidatorResult result : validatorResultList){
                 if (!result.isResult()){
                     validationResults.setValidationState(false);
@@ -69,18 +72,18 @@ public class BatchQueueValidator implements JobMetadataValidator {
         return validationResults;
     }
 
-    private List<ValidatorResult> validateUserConfiguration (Experiment experiment, TaskDetails taskDetail) throws AppCatalogException{
+    private List<ValidatorResult> validateUserConfiguration (ExperimentModel experiment, ProcessModel processModel) throws AppCatalogException{
         List<ValidatorResult> validatorResultList = new ArrayList<ValidatorResult>();
         try {
-            UserConfigurationData userConfigurationData = experiment.getUserConfigurationData();
-            ComputationalResourceScheduling computationalResourceScheduling = userConfigurationData.getComputationalResourceScheduling();
+            UserConfigurationDataModel userConfigurationData = experiment.getUserConfigurationData();
+            ComputationalResourceSchedulingModel computationalResourceScheduling = userConfigurationData.getComputationalResourceScheduling();
             if (userConfigurationData.isAiravataAutoSchedule()) {
                 logger.error("Auto-Schedule is not yet supported. Experiment will proceed with provided scheduling information");
                 ValidatorResult validatorResult = new ValidatorResult();
                 validatorResult.setResult(false);
                 validatorResultList.add(validatorResult);
             }
-            ComputeResourceDescription computeResource = appCatalog.getComputeResource().getComputeResource(taskDetail.getTaskScheduling().getResourceHostId());
+            ComputeResourceDescription computeResource = appCatalog.getComputeResource().getComputeResource(processModel.getResourceSchedule().getResourceHostId());
             List<BatchQueue> batchQueues = computeResource.getBatchQueues();
 
             if (batchQueues != null && !batchQueues.isEmpty()) {
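
The substantive change in this validator is where the scheduling information comes from: it was previously read from TaskDetails.getTaskScheduling() and is now taken from the process-level resource schedule. A small stand-alone sketch of the new access path, using only the getters visible in the hunk above (the null guard is added here for illustration):

    import org.apache.airavata.model.process.ProcessModel;
    import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel;

    class SchedulingAccess {
        // Resolve the target compute host from the process, mirroring the compute-resource lookup above.
        static String resolveResourceHostId(ProcessModel processModel) {
            ComputationalResourceSchedulingModel schedule = processModel.getResourceSchedule();
            return schedule == null ? null : schedule.getResourceHostId();
        }
    }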

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/ExperimentStatusValidator.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/ExperimentStatusValidator.java b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/ExperimentStatusValidator.java
index 7c01f72..faae1bf 100644
--- a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/ExperimentStatusValidator.java
+++ b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/validator/impl/ExperimentStatusValidator.java
@@ -23,9 +23,9 @@ package org.apache.airavata.orchestrator.core.validator.impl;
 import org.apache.airavata.model.error.ValidationResults;
 import org.apache.airavata.model.error.ValidatorResult;
 import org.apache.airavata.model.experiment.ExperimentModel;
-import org.apache.airavata.model.experiment.ExperimentState;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.apache.airavata.model.experiment.WorkflowNodeDetails;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.status.ExperimentState;
+import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.orchestrator.core.validator.JobMetadataValidator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -36,14 +36,14 @@ import java.util.List;
 public class ExperimentStatusValidator implements JobMetadataValidator {
     private static Logger log = LoggerFactory.getLogger(ExperimentStatusValidator.class);
 
-    public ValidationResults validate(Experiment experiment, WorkflowNodeDetails workflowNodeDetail, TaskDetails taskID) {
+    public ValidationResults validate(ExperimentModel experiment, ProcessModel processModel, TaskModel taskModel) {
         String error = "During the validation step experiment status should be CREATED, But this experiment status is : ";
         ValidationResults validationResults = new ValidationResults();
         validationResults.setValidationState(true);
         ValidatorResult validatorResult = new ValidatorResult();
         List<ValidatorResult> validatorResultList = new ArrayList<ValidatorResult>();
-        if (!experiment.getExperimentStatus().getExperimentState().equals(ExperimentState.CREATED)) {
-            error += experiment.getExperimentStatus().getExperimentState().toString();
+        if (!experiment.getExperimentStatus().getState().equals(ExperimentState.CREATED)) {
+            error += experiment.getExperimentStatus().getState().toString();
             log.error(error);
             validatorResult.setErrorDetails(error);
             validatorResult.setResult(false);

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/Orchestrator.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/Orchestrator.java b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/Orchestrator.java
index c923411..7bb9d5f 100644
--- a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/Orchestrator.java
+++ b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/Orchestrator.java
@@ -23,8 +23,8 @@ package org.apache.airavata.orchestrator.cpi;
 import org.apache.airavata.model.error.LaunchValidationException;
 import org.apache.airavata.model.error.ValidationResults;
 import org.apache.airavata.model.experiment.ExperimentModel;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.apache.airavata.model.experiment.WorkflowNodeDetails;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
 
 import javax.validation.Validation;
@@ -40,13 +40,13 @@ public interface Orchestrator {
      * This method can be used to run all custom validators plugged in to the orchestrator and make
      * sure the experiment is ready to launch and if its not this will return false
      * @param experiment
-     * @param workflowNodeDetail
-     * @param taskID
+     * @param processModel
+     * @param taskModel
      * @return boolean if the experiments are valids after executing all the validators return true otherwise it iwll return false
      * @throws OrchestratorException
      */
-     ValidationResults validateExperiment(Experiment experiment, WorkflowNodeDetails workflowNodeDetail,
-                            TaskDetails taskID) throws OrchestratorException,LaunchValidationException;
+     ValidationResults validateExperiment(ExperimentModel experiment, ProcessModel processModel,
+                            TaskModel taskModel) throws OrchestratorException,LaunchValidationException;
     /**
      * After creating the experiment Data user have the
      * experimentID as the handler to the experiment, during the launchExperiment
@@ -56,8 +56,8 @@ public interface Orchestrator {
      * @return launchExperiment status
      * @throws OrchestratorException
      */
-    boolean launchExperiment(Experiment experiment, WorkflowNodeDetails workflowNodeDetail,
-                            TaskDetails taskID,String tokenId) throws OrchestratorException;
+    boolean launchExperiment(ExperimentModel experiment, ProcessModel processModel,
+                            TaskModel taskModel,String tokenId) throws OrchestratorException;
 
 
     /**
@@ -67,7 +67,7 @@ public interface Orchestrator {
      * @return
      * @throws OrchestratorException
      */
-    public List<TaskDetails> createTasks(String experimentId) throws OrchestratorException;
+    public List<TaskModel> createTasks(String experimentId) throws OrchestratorException;
 
     /**
      * After creating the experiment Data user have the
@@ -75,12 +75,12 @@ public interface Orchestrator {
      * We just have to give the experimentID
      *
      * @param experiment
-     * @param workflowNode
+     * @param processModel
      * @param task
      * @param tokenId
      * @throws OrchestratorException
      */
-    void cancelExperiment(Experiment experiment, WorkflowNodeDetails workflowNode, TaskDetails task,String tokenId) throws OrchestratorException;
+    void cancelExperiment(ExperimentModel experiment, ProcessModel processModel, TaskModel task,String tokenId) throws OrchestratorException;
     //todo have to add another method to handle failed or jobs to be recovered by orchestrator
     //todo if you don't add these this is not an orchestrator, its just an intemediate component which invoke gfac
 
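With the CPI updated, a caller validates and launches against the same three models. The helper below is a hypothetical usage sketch, not code from this commit; it uses only the methods declared in the interface above.

    import org.apache.airavata.model.error.LaunchValidationException;
    import org.apache.airavata.model.error.ValidationResults;
    import org.apache.airavata.model.experiment.ExperimentModel;
    import org.apache.airavata.model.process.ProcessModel;
    import org.apache.airavata.model.task.TaskModel;
    import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
    import org.apache.airavata.orchestrator.cpi.Orchestrator;

    class LaunchHelper {
        // Validate first; only launch when every registered validator passed.
        static boolean validateAndLaunch(Orchestrator orchestrator, ExperimentModel experiment,
                                         ProcessModel processModel, TaskModel taskModel, String tokenId)
                throws OrchestratorException, LaunchValidationException {
            ValidationResults results = orchestrator.validateExperiment(experiment, processModel, taskModel);
            if (!results.isValidationState()) {
                return false; // the orchestrator wraps per-validator errors for the client
            }
            return orchestrator.launchExperiment(experiment, processModel, taskModel, tokenId);
        }
    }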

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/impl/SimpleOrchestratorImpl.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/impl/SimpleOrchestratorImpl.java b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/impl/SimpleOrchestratorImpl.java
index e7d9049..bed31ce 100644
--- a/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/impl/SimpleOrchestratorImpl.java
+++ b/modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/impl/SimpleOrchestratorImpl.java
@@ -20,9 +20,12 @@
 */
 package org.apache.airavata.orchestrator.cpi.impl;
 
+import org.apache.airavata.model.commons.ErrorModel;
 import org.apache.airavata.model.error.LaunchValidationException;
 import org.apache.airavata.model.error.ValidationResults;
 import org.apache.airavata.model.error.ValidatorResult;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.model.util.ExperimentModelUtil;
 import org.apache.airavata.model.experiment.*;
 import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
@@ -64,10 +67,10 @@ public class SimpleOrchestratorImpl extends AbstractOrchestrator{
         }
     }
 
-    public boolean launchExperiment(Experiment experiment, WorkflowNodeDetails workflowNode, TaskDetails task,String tokenId) throws OrchestratorException {
+    public boolean launchExperiment(ExperimentModel experiment, ProcessModel processModel, TaskModel task,String tokenId) throws OrchestratorException {
         // we give higher priority to userExperimentID
-        String experimentId = experiment.getExperimentID();
-        String taskId = task.getTaskID();
+        String experimentId = experiment.getExperimentId();
+        String taskId = task.getTaskId();
         // creating monitorID to register with monitoring queue
         // this is a special case because amqp has to be in place before submitting the job
         try {
@@ -85,36 +88,37 @@ public class SimpleOrchestratorImpl extends AbstractOrchestrator{
      * @return
      * @throws OrchestratorException
      */
-    public List<TaskDetails> createTasks(String experimentId) throws OrchestratorException {
-        Experiment experiment = null;
-        List<TaskDetails> tasks = new ArrayList<TaskDetails>();
-        try {
-            Registry newRegistry = orchestratorContext.getNewRegistry();
-            experiment = (Experiment) newRegistry.getExperimentCatalog().get(ExperimentCatalogModelType.EXPERIMENT, experimentId);
-            List<WorkflowNodeDetails> workflowNodeDetailsList = experiment.getWorkflowNodeDetailsList();
-            if (workflowNodeDetailsList != null && !workflowNodeDetailsList.isEmpty()){
-                for (WorkflowNodeDetails wfn : workflowNodeDetailsList){
-                    List<TaskDetails> taskDetailsList = wfn.getTaskDetailsList();
-                    if (taskDetailsList != null && !taskDetailsList.isEmpty()){
-                        return taskDetailsList;
-                    }
-                }
-            }else {
-                WorkflowNodeDetails iDontNeedaNode = ExperimentModelUtil.createWorkflowNode("tempNode", null);
-                String nodeID = (String) newRegistry.getExperimentCatalog().add(ExpCatChildDataType.WORKFLOW_NODE_DETAIL, iDontNeedaNode, experimentId);
-
-                TaskDetails taskDetails = ExperimentModelUtil.cloneTaskFromExperiment(experiment);
-                taskDetails.setTaskID((String) newRegistry.getExperimentCatalog().add(ExpCatChildDataType.TASK_DETAIL, taskDetails, nodeID));
-                tasks.add(taskDetails);
-            }
-
-        } catch (Exception e) {
-            throw new OrchestratorException("Error during creating a task");
-        }
+    public List<TaskModel> createTasks(String experimentId) throws OrchestratorException {
+        ExperimentModel experiment = null;
+        List<TaskModel> tasks = new ArrayList<TaskModel>();
+        // FIXME : should change as create processes
+//        try {
+//            Registry newRegistry = orchestratorContext.getNewRegistry();
+//            experiment = (ExperimentModel) newRegistry.getExperimentCatalog().get(ExperimentCatalogModelType.EXPERIMENT, experimentId);
+//            List<ProcessModel> workflowNodeDetailsList = experiment.getWorkflowNodeDetailsList();
+//            if (workflowNodeDetailsList != null && !workflowNodeDetailsList.isEmpty()){
+//                for (WorkflowNodeDetails wfn : workflowNodeDetailsList){
+//                    List<TaskDetails> taskDetailsList = wfn.getTaskDetailsList();
+//                    if (taskDetailsList != null && !taskDetailsList.isEmpty()){
+//                        return taskDetailsList;
+//                    }
+//                }
+//            }else {
+//                WorkflowNodeDetails iDontNeedaNode = ExperimentModelUtil.createWorkflowNode("tempNode", null);
+//                String nodeID = (String) newRegistry.getExperimentCatalog().add(ExpCatChildDataType.WORKFLOW_NODE_DETAIL, iDontNeedaNode, experimentId);
+//
+//                TaskDetails taskDetails = ExperimentModelUtil.cloneTaskFromExperiment(experiment);
+//                taskDetails.setTaskID((String) newRegistry.getExperimentCatalog().add(ExpCatChildDataType.TASK_DETAIL, taskDetails, nodeID));
+//                tasks.add(taskDetails);
+//            }
+
+//        } catch (Exception e) {
+//            throw new OrchestratorException("Error during creating a task");
+//        }
         return tasks;
     }
 
-    public ValidationResults validateExperiment(Experiment experiment, WorkflowNodeDetails workflowNodeDetail, TaskDetails taskID) throws OrchestratorException,LaunchValidationException {
+    public ValidationResults validateExperiment(ExperimentModel experiment, ProcessModel processModel, TaskModel taskModel) throws OrchestratorException,LaunchValidationException {
         org.apache.airavata.model.error.ValidationResults validationResults = new org.apache.airavata.model.error.ValidationResults();
         validationResults.setValidationState(true); // initially making it to success, if atleast one failed them simply mark it failed.
         String errorMsg = "Validation Errors : ";
@@ -124,7 +128,7 @@ public class SimpleOrchestratorImpl extends AbstractOrchestrator{
                 try {
                     Class<? extends JobMetadataValidator> vClass = Class.forName(validator.trim()).asSubclass(JobMetadataValidator.class);
                     JobMetadataValidator jobMetadataValidator = vClass.newInstance();
-                    validationResults = jobMetadataValidator.validate(experiment, workflowNodeDetail, taskID);
+                    validationResults = jobMetadataValidator.validate(experiment, processModel, taskModel);
                     if (validationResults.isValidationState()) {
                         logger.info("Validation of " + validator + " is SUCCESSFUL");
                     } else {
@@ -137,17 +141,14 @@ public class SimpleOrchestratorImpl extends AbstractOrchestrator{
                                 }
                             }
                         }
-                        logger.error("Validation of " + validator + " for experiment Id " + experiment.getExperimentID() + " is FAILED:[error]. " + errorMsg);
+                        logger.error("Validation of " + validator + " for experiment Id " + experiment.getExperimentId() + " is FAILED:[error]. " + errorMsg);
                         validationResults.setValidationState(false);
                         try {
-                            ErrorDetails details = new ErrorDetails();
+                            ErrorModel details = new ErrorModel();
                             details.setActualErrorMessage(errorMsg);
-                            details.setCorrectiveAction(CorrectiveAction.RETRY_SUBMISSION);
-                            details.setActionableGroup(ActionableGroup.GATEWAYS_ADMINS);
                             details.setCreationTime(Calendar.getInstance().getTimeInMillis());
-                            details.setErrorCategory(ErrorCategory.APPLICATION_FAILURE);
-                            orchestratorContext.getNewRegistry().getExperimentCatalog().add(ExpCatChildDataType.ERROR_DETAIL, details,
-                                    taskID.getTaskID());
+                            orchestratorContext.getNewRegistry().getExperimentCatalog().add(ExpCatChildDataType.TASK_ERROR, details,
+                                    taskModel.getTaskId());
                         } catch (RegistryException e) {
                             logger.error("Error while saving error details to registry", e);
                         }
@@ -176,18 +177,19 @@ public class SimpleOrchestratorImpl extends AbstractOrchestrator{
         }
     }
 
-    public void cancelExperiment(Experiment experiment, WorkflowNodeDetails workflowNode, TaskDetails task, String tokenId)
+    public void cancelExperiment(ExperimentModel experiment, ProcessModel processModel, TaskModel task, String tokenId)
             throws OrchestratorException {
-        List<JobDetails> jobDetailsList = task.getJobDetailsList();
-        for(JobDetails jobDetails:jobDetailsList) {
-            JobState jobState = jobDetails.getJobStatus().getJobState();
-            if (jobState.getValue() > 4){
-                logger.error("Cannot cancel the job, because current job state is : " + jobState.toString() +
-                "jobId: " + jobDetails.getJobID() + " Job Name: " + jobDetails.getJobName());
-                return;
-            }
-        }
-        jobSubmitter.terminate(experiment.getExperimentID(),task.getTaskID(),tokenId);
+        // FIXME
+//        List<JobDetails> jobDetailsList = task.getJobDetailsList();
+//        for(JobDetails jobDetails:jobDetailsList) {
+//            JobState jobState = jobDetails.getJobStatus().getJobState();
+//            if (jobState.getValue() > 4){
+//                logger.error("Cannot cancel the job, because current job state is : " + jobState.toString() +
+//                "jobId: " + jobDetails.getJobID() + " Job Name: " + jobDetails.getJobName());
+//                return;
+//            }
+//        }
+//        jobSubmitter.terminate(experiment.getExperimentID(),task.getTaskID(),tokenId);
     }
 
 
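One consequence of the model migration visible in this class is how validation failures are persisted: the old ErrorDetails record (with corrective action, actionable group and error category) is replaced by the leaner ErrorModel stored under TASK_ERROR. A stand-alone sketch of that save path, kept to the calls that appear in the hunk above (the registry package locations are assumed):

    import java.util.Calendar;

    import org.apache.airavata.model.commons.ErrorModel;
    import org.apache.airavata.registry.cpi.ExpCatChildDataType;
    import org.apache.airavata.registry.cpi.ExperimentCatalog;
    import org.apache.airavata.registry.cpi.RegistryException;

    class TaskErrorRecorder {
        // Persist a task-level error against the given task id.
        static void record(ExperimentCatalog experimentCatalog, String taskId, String message)
                throws RegistryException {
            ErrorModel details = new ErrorModel();
            details.setActualErrorMessage(message);
            details.setCreationTime(Calendar.getInstance().getTimeInMillis());
            experimentCatalog.add(ExpCatChildDataType.TASK_ERROR, details, taskId);
        }
    }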

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/server/OrchestratorServerHandler.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/server/OrchestratorServerHandler.java b/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/server/OrchestratorServerHandler.java
index 373adcf..86061e4 100644
--- a/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/server/OrchestratorServerHandler.java
+++ b/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/server/OrchestratorServerHandler.java
@@ -21,14 +21,16 @@
 
 package org.apache.airavata.orchestrator.server;
 
+import org.apache.airavata.model.experiment.ExperimentType;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.registry.core.app.catalog.resources.AppCatAbstractResource;
+import org.apache.airavata.registry.core.experiment.catalog.resources.AbstractExpCatResource;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.AppCatalogException;
 import org.apache.airavata.registry.cpi.ComputeResource;
 import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.logger.AiravataLogger;
-import org.apache.airavata.common.logger.AiravataLoggerFactory;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.common.utils.AiravataZKUtils;
 import org.apache.airavata.common.utils.Constants;
@@ -52,14 +54,6 @@ import org.apache.airavata.model.messaging.event.MessageType;
 import org.apache.airavata.model.messaging.event.ProcessSubmitEvent;
 import org.apache.airavata.model.util.ExecutionType;
 import org.apache.airavata.model.experiment.ExperimentModel;
-import org.apache.airavata.model.experiment.ExperimentState;
-import org.apache.airavata.model.experiment.ExperimentStatus;
-import org.apache.airavata.model.experiment.TaskDetails;
-import org.apache.airavata.model.experiment.TaskState;
-import org.apache.airavata.model.experiment.TaskStatus;
-import org.apache.airavata.model.experiment.WorkflowNodeDetails;
-import org.apache.airavata.model.experiment.WorkflowNodeState;
-import org.apache.airavata.model.experiment.WorkflowNodeStatus;
 import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
 import org.apache.airavata.orchestrator.cpi.OrchestratorService;
 import org.apache.airavata.orchestrator.cpi.impl.SimpleOrchestratorImpl;
@@ -70,11 +64,10 @@ import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory
 import org.apache.airavata.registry.cpi.ExperimentCatalog;
 import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
 import org.apache.airavata.registry.cpi.RegistryException;
-import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.TaskDetailConstants;
-import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.WorkflowNodeConstants;
-import org.apache.airavata.workflow.core.WorkflowEnactmentService;
 import org.apache.thrift.TBase;
 import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Arrays;
 import java.util.Calendar;
@@ -83,7 +76,7 @@ import java.util.List;
 import java.util.Map;
 
 public class OrchestratorServerHandler implements OrchestratorService.Iface {
-	private static AiravataLogger log = AiravataLoggerFactory .getLogger(OrchestratorServerHandler.class);
+	private static Logger log = LoggerFactory.getLogger(OrchestratorServerHandler.class);
 	private SimpleOrchestratorImpl orchestrator = null;
 	private ExperimentCatalog experimentCatalog;
 	private static Integer mutex = new Integer(-1);
@@ -104,14 +97,6 @@ public class OrchestratorServerHandler implements OrchestratorService.Iface {
 		// registering with zk
 		try {
 	        publisher = PublisherFactory.createActivityPublisher();
-			String zkhostPort = AiravataZKUtils.getZKhostPort();
-			String airavataServerHostPort = ServerSettings
-					.getSetting(Constants.ORCHESTRATOR_SERVER_HOST)
-					+ ":"
-					+ ServerSettings
-							.getSetting(Constants.ORCHESTRATOR_SERVER_PORT);
-			
-//            setGatewayName(ServerSettings.getDefaultUserGateway());
             setAiravataUserName(ServerSettings.getDefaultUser());
 		} catch (AiravataException e) {
             log.error(e.getMessage(), e);
@@ -157,12 +142,12 @@ public class OrchestratorServerHandler implements OrchestratorService.Iface {
 	 * @param experimentId
 	 */
 	public boolean launchExperiment(String experimentId, String token) throws TException {
-        Experiment experiment = null; // this will inside the bottom catch statement
+        ExperimentModel experiment = null; // this will inside the bottom catch statement
         try {
-            experiment = (Experiment) experimentCatalog.get(
+            experiment = (ExperimentModel) experimentCatalog.get(
                     ExperimentCatalogModelType.EXPERIMENT, experimentId);
             if (experiment == null) {
-                log.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {} ", experimentId);
+                log.error(experimentId, "Error retrieving the Experiment by the given experimentID: {} ", experimentId);
                 return false;
             }
             CredentialReader credentialReader = GFacUtils.getCredentialReader();
@@ -179,21 +164,21 @@ public class OrchestratorServerHandler implements OrchestratorService.Iface {
                 log.info("Couldn't identify the gateway Id using the credential token, Use default gateway Id");
 //                throw new AiravataException("Couldn't identify the gateway Id using the credential token");
             }
-            ExecutionType executionType = DataModelUtils.getExecutionType(gatewayId, experiment);
-            if (executionType == ExecutionType.SINGLE_APP) {
+            ExperimentType executionType = experiment.getExperimentType();
+            if (executionType == ExperimentType.SINGLE_APPLICATION) {
                 //its an single application execution experiment
-                log.debugId(experimentId, "Launching single application experiment {}.", experimentId);
+                log.debug(experimentId, "Launching single application experiment {}.", experimentId);
                 OrchestratorServerThreadPoolExecutor.getCachedThreadPool().execute(new SingleAppExperimentRunner(experimentId, token));
-            } else if (executionType == ExecutionType.WORKFLOW) {
+            } else if (executionType == ExperimentType.WORKFLOW) {
                 //its a workflow execution experiment
-                log.debugId(experimentId, "Launching workflow experiment {}.", experimentId);
+                log.debug(experimentId, "Launching workflow experiment {}.", experimentId);
                 launchWorkflowExperiment(experimentId, token);
             } else {
-                log.errorId(experimentId, "Couldn't identify experiment type, experiment {} is neither single application nor workflow.", experimentId);
-                throw new TException("Experiment '" + experimentId + "' launch failed. Unable to figureout execution type for application " + experiment.getApplicationId());
+                log.error(experimentId, "Couldn't identify experiment type, experiment {} is neither single application nor workflow.", experimentId);
+                throw new TException("Experiment '" + experimentId + "' launch failed. Unable to figureout execution type for application " + experiment.getExecutionId());
             }
         } catch (Exception e) {
-            throw new TException("Experiment '" + experimentId + "' launch failed. Unable to figureout execution type for application " + experiment.getApplicationId(), e);
+            throw new TException("Experiment '" + experimentId + "' launch failed. Unable to figureout execution type for application " + experiment.getExecutionId(), e);
         }
         return true;
 	}
@@ -211,41 +196,42 @@ public class OrchestratorServerHandler implements OrchestratorService.Iface {
 			LaunchValidationException {
 		// TODO: Write the Orchestrator implementaion
 		try {
-			List<TaskDetails> tasks = orchestrator.createTasks(experimentId);
+			List<TaskModel> tasks = orchestrator.createTasks(experimentId);
 			if (tasks.size() > 1) {
 				log.info("There are multiple tasks for this experiment, So Orchestrator will launch multiple Jobs");
 			}
 			List<String> ids = experimentCatalog.getIds(
-					ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
-					WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
-			for (String workflowNodeId : ids) {
-				WorkflowNodeDetails workflowNodeDetail = (WorkflowNodeDetails) experimentCatalog
-						.get(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
-								workflowNodeId);
-				List<Object> taskDetailList = experimentCatalog.get(
-						ExperimentCatalogModelType.TASK_DETAIL,
-						TaskDetailConstants.NODE_ID, workflowNodeId);
-				for (Object o : taskDetailList) {
-					TaskDetails taskID = (TaskDetails) o;
-					// iterate through all the generated tasks and performs the
-					// job submisssion+monitoring
-					Experiment experiment = (Experiment) experimentCatalog.get(
-							ExperimentCatalogModelType.EXPERIMENT, experimentId);
-					if (experiment == null) {
-						log.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {}.",
-                                experimentId);
-						return false;
-					}
-					return orchestrator.validateExperiment(experiment,
-							workflowNodeDetail, taskID).isSetValidationState();
-				}
+					ExperimentCatalogModelType.PROCESS,
+					AbstractExpCatResource.ProcessConstants.EXPERIMENT_ID, experimentId);
+			for (String processId : ids) {
+				ProcessModel processModel = (ProcessModel) experimentCatalog
+						.get(ExperimentCatalogModelType.PROCESS,
+								processId);
+                // FIXME : no need to create tasks at orchestrator level
+//				List<Object> taskDetailList = experimentCatalog.get(
+//						ExperimentCatalogModelType.TASK_DETAIL,
+////						TaskDetailConstants.NODE_ID, processId);
+//				for (Object o : taskDetailList) {
+//					TaskDetails taskID = (TaskDetails) o;
+//					// iterate through all the generated tasks and performs the
+//					// job submisssion+monitoring
+//					Experiment experiment = (Experiment) experimentCatalog.get(
+//							ExperimentCatalogModelType.EXPERIMENT, experimentId);
+//					if (experiment == null) {
+//						log.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {}.",
+//                                experimentId);
+//						return false;
+//					}
+//					return orchestrator.validateExperiment(experiment,
+//							processModel, taskID).isSetValidationState();
+//				}
 			}
 
 		} catch (OrchestratorException e) {
-            log.errorId(experimentId, "Error while validating experiment", e);
+            log.error(experimentId, "Error while validating experiment", e);
 			throw new TException(e);
 		} catch (RegistryException e) {
-            log.errorId(experimentId, "Error while validating experiment", e);
+            log.error(experimentId, "Error while validating experiment", e);
 			throw new TException(e);
 		}
 		return false;
@@ -260,7 +246,7 @@ public class OrchestratorServerHandler implements OrchestratorService.Iface {
 	 * @throws TException
 	 */
 	public boolean terminateExperiment(String experimentId, String tokenId) throws TException {
-        log.infoId(experimentId, "Experiment: {} is cancelling  !!!!!", experimentId);
+        log.info(experimentId, "Experiment: {} is cancelling  !!!!!", experimentId);
         return validateStatesAndCancel(experimentId, tokenId);
 	}
 
@@ -282,77 +268,78 @@ public class OrchestratorServerHandler implements OrchestratorService.Iface {
 
 	@Override
 	public boolean launchTask(String taskId, String airavataCredStoreToken) throws TException {
-		try {
-			TaskDetails taskData = (TaskDetails) experimentCatalog.get(
-					ExperimentCatalogModelType.TASK_DETAIL, taskId);
-			String applicationId = taskData.getApplicationId();
-			if (applicationId == null) {
-                log.errorId(taskId, "Application id shouldn't be null.");
-				throw new OrchestratorException("Error executing the job, application id shouldn't be null.");
-			}
-			ApplicationDeploymentDescription applicationDeploymentDescription = getAppDeployment(taskData, applicationId);
-            taskData.setApplicationDeploymentId(applicationDeploymentDescription.getAppDeploymentId());
-			experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, taskData,taskData.getTaskID());
-			List<Object> workflowNodeDetailList = experimentCatalog.get(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
-							org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.WorkflowNodeConstants.TASK_LIST, taskData);
-			if (workflowNodeDetailList != null
-					&& workflowNodeDetailList.size() > 0) {
-				List<Object> experimentList = experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT,
-								org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.ExperimentConstants.WORKFLOW_NODE_LIST,
-								(WorkflowNodeDetails) workflowNodeDetailList.get(0));
-				if (experimentList != null && experimentList.size() > 0) {
-					return orchestrator
-							.launchExperiment(
-									(Experiment) experimentList.get(0),
-									(WorkflowNodeDetails) workflowNodeDetailList
-											.get(0), taskData,airavataCredStoreToken);
-				}
-			}
-		} catch (Exception e) {
-            log.errorId(taskId, "Error while launching task ", e);
-            throw new TException(e);
-        }
-        log.infoId(taskId, "No experiment found associated in task {}", taskId);
+        // FIXME : should be launch process instead of the task
+//		try {
+//			TaskDetails taskData = (TaskDetails) experimentCatalog.get(
+//					ExperimentCatalogModelType.TASK_DETAIL, taskId);
+//			String applicationId = taskData.getApplicationId();
+//			if (applicationId == null) {
+//                log.errorId(taskId, "Application id shouldn't be null.");
+//				throw new OrchestratorException("Error executing the job, application id shouldn't be null.");
+//			}
+//			ApplicationDeploymentDescription applicationDeploymentDescription = getAppDeployment(taskData, applicationId);
+//            taskData.setApplicationDeploymentId(applicationDeploymentDescription.getAppDeploymentId());
+//			experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, taskData,taskData.getTaskID());
+//			List<Object> workflowNodeDetailList = experimentCatalog.get(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
+//							org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.WorkflowNodeConstants.TASK_LIST, taskData);
+//			if (workflowNodeDetailList != null
+//					&& workflowNodeDetailList.size() > 0) {
+//				List<Object> experimentList = experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT,
+//								org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.ExperimentConstants.WORKFLOW_NODE_LIST,
+//								(WorkflowNodeDetails) workflowNodeDetailList.get(0));
+//				if (experimentList != null && experimentList.size() > 0) {
+//					return orchestrator
+//							.launchExperiment(
+//									(Experiment) experimentList.get(0),
+//									(WorkflowNodeDetails) workflowNodeDetailList
+//											.get(0), taskData,airavataCredStoreToken);
+//				}
+//			}
+//		} catch (Exception e) {
+//            log.errorId(taskId, "Error while launching task ", e);
+//            throw new TException(e);
+//        }
+//        log.infoId(taskId, "No experiment found associated in task {}", taskId);
         return false;
 	}
 
-	private ApplicationDeploymentDescription getAppDeployment(
-			TaskDetails taskData, String applicationId)
-			throws AppCatalogException, OrchestratorException,
-			ClassNotFoundException, ApplicationSettingsException,
-			InstantiationException, IllegalAccessException {
-		AppCatalog appCatalog = RegistryFactory.getAppCatalog();
-		String selectedModuleId = getModuleId(appCatalog, applicationId);
-		ApplicationDeploymentDescription applicationDeploymentDescription = getAppDeployment(
-				appCatalog, taskData, selectedModuleId);
-		return applicationDeploymentDescription;
-	}
-
-	private ApplicationDeploymentDescription getAppDeployment(
-			AppCatalog appCatalog, TaskDetails taskData, String selectedModuleId)
-			throws AppCatalogException, ClassNotFoundException,
-			ApplicationSettingsException, InstantiationException,
-			IllegalAccessException {
-		Map<String, String> moduleIdFilter = new HashMap<String, String>();
-		moduleIdFilter.put(AppCatAbstractResource.ApplicationDeploymentConstants.APP_MODULE_ID, selectedModuleId);
-		if (taskData.getTaskScheduling()!=null && taskData.getTaskScheduling().getResourceHostId() != null) {
-		    moduleIdFilter.put(AppCatAbstractResource.ApplicationDeploymentConstants.COMPUTE_HOST_ID, taskData.getTaskScheduling().getResourceHostId());
-		}
-		List<ApplicationDeploymentDescription> applicationDeployements = appCatalog.getApplicationDeployment().getApplicationDeployements(moduleIdFilter);
-		Map<ComputeResourceDescription, ApplicationDeploymentDescription> deploymentMap = new HashMap<ComputeResourceDescription, ApplicationDeploymentDescription>();
-		ComputeResource computeResource = appCatalog.getComputeResource();
-		for (ApplicationDeploymentDescription deploymentDescription : applicationDeployements) {
-			deploymentMap.put(computeResource.getComputeResource(deploymentDescription.getComputeHostId()),deploymentDescription);
-		}
-		List<ComputeResourceDescription> computeHostList = Arrays.asList(deploymentMap.keySet().toArray(new ComputeResourceDescription[]{}));	
-		Class<? extends HostScheduler> aClass = Class.forName(
-				ServerSettings.getHostScheduler()).asSubclass(
-				HostScheduler.class);
-		HostScheduler hostScheduler = aClass.newInstance();
-		ComputeResourceDescription ComputeResourceDescription = hostScheduler.schedule(computeHostList);
-		ApplicationDeploymentDescription applicationDeploymentDescription = deploymentMap.get(ComputeResourceDescription);
-		return applicationDeploymentDescription;
-	}
+//	private ApplicationDeploymentDescription getAppDeployment(
+//			TaskDetails taskData, String applicationId)
+//			throws AppCatalogException, OrchestratorException,
+//			ClassNotFoundException, ApplicationSettingsException,
+//			InstantiationException, IllegalAccessException {
+//		AppCatalog appCatalog = RegistryFactory.getAppCatalog();
+//		String selectedModuleId = getModuleId(appCatalog, applicationId);
+//		ApplicationDeploymentDescription applicationDeploymentDescription = getAppDeployment(
+//				appCatalog, taskData, selectedModuleId);
+//		return applicationDeploymentDescription;
+//	}
+
+//	private ApplicationDeploymentDescription getAppDeployment(
+//			AppCatalog appCatalog, TaskDetails taskData, String selectedModuleId)
+//			throws AppCatalogException, ClassNotFoundException,
+//			ApplicationSettingsException, InstantiationException,
+//			IllegalAccessException {
+//		Map<String, String> moduleIdFilter = new HashMap<String, String>();
+//		moduleIdFilter.put(AppCatAbstractResource.ApplicationDeploymentConstants.APP_MODULE_ID, selectedModuleId);
+//		if (taskData.getTaskScheduling()!=null && taskData.getTaskScheduling().getResourceHostId() != null) {
+//		    moduleIdFilter.put(AppCatAbstractResource.ApplicationDeploymentConstants.COMPUTE_HOST_ID, taskData.getTaskScheduling().getResourceHostId());
+//		}
+//		List<ApplicationDeploymentDescription> applicationDeployements = appCatalog.getApplicationDeployment().getApplicationDeployements(moduleIdFilter);
+//		Map<ComputeResourceDescription, ApplicationDeploymentDescription> deploymentMap = new HashMap<ComputeResourceDescription, ApplicationDeploymentDescription>();
+//		ComputeResource computeResource = appCatalog.getComputeResource();
+//		for (ApplicationDeploymentDescription deploymentDescription : applicationDeployements) {
+//			deploymentMap.put(computeResource.getComputeResource(deploymentDescription.getComputeHostId()),deploymentDescription);
+//		}
+//		List<ComputeResourceDescription> computeHostList = Arrays.asList(deploymentMap.keySet().toArray(new ComputeResourceDescription[]{}));
+//		Class<? extends HostScheduler> aClass = Class.forName(
+//				ServerSettings.getHostScheduler()).asSubclass(
+//				HostScheduler.class);
+//		HostScheduler hostScheduler = aClass.newInstance();
+//		ComputeResourceDescription ComputeResourceDescription = hostScheduler.schedule(computeHostList);
+//		ApplicationDeploymentDescription applicationDeploymentDescription = deploymentMap.get(ComputeResourceDescription);
+//		return applicationDeploymentDescription;
+//	}
 
 	private String getModuleId(AppCatalog appCatalog, String applicationId)
 			throws AppCatalogException, OrchestratorException {
@@ -369,153 +356,133 @@ public class OrchestratorServerHandler implements OrchestratorService.Iface {
 	}
 
     private boolean validateStatesAndCancel(String experimentId, String tokenId)throws TException{
-        try {
-            Experiment experiment = (Experiment) experimentCatalog.get(
-                    ExperimentCatalogModelType.EXPERIMENT, experimentId);
-			log.info("Waiting for zookeeper to connect to the server");
-			synchronized (mutex){
-				mutex.wait(5000);
-			}
-            if (experiment == null) {
-                log.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {}.", experimentId);
-                throw new OrchestratorException("Error retrieving the Experiment by the given experimentID: " + experimentId);
-            }
-            ExperimentState experimentState = experiment.getExperimentStatus().getExperimentState();
-            if (isCancelValid(experimentState)){
-                ExperimentStatus status = new ExperimentStatus();
-                status.setExperimentState(ExperimentState.CANCELING);
-                status.setTimeOfStateChange(Calendar.getInstance()
-                        .getTimeInMillis());
-                experiment.setExperimentStatus(status);
-                experimentCatalog.update(ExperimentCatalogModelType.EXPERIMENT, experiment,
-                        experimentId);
-
-                List<String> ids = experimentCatalog.getIds(
-                        ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
-                        WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
-                for (String workflowNodeId : ids) {
-                    WorkflowNodeDetails workflowNodeDetail = (WorkflowNodeDetails) experimentCatalog
-                            .get(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
-                                    workflowNodeId);
-                    int value = workflowNodeDetail.getWorkflowNodeStatus().getWorkflowNodeState().getValue();
-                    if ( value> 1 && value < 7) { // we skip the unknown state
-                        log.error(workflowNodeDetail.getNodeName() + " Workflow Node status cannot mark as cancelled, because " +
-                                "current status is " + workflowNodeDetail.getWorkflowNodeStatus().getWorkflowNodeState().toString());
-                        continue; // this continue is very useful not to process deeper loops if the upper layers have non-cancel states
-                    } else {
-                        WorkflowNodeStatus workflowNodeStatus = new WorkflowNodeStatus();
-                        workflowNodeStatus.setWorkflowNodeState(WorkflowNodeState.CANCELING);
-                        workflowNodeStatus.setTimeOfStateChange(Calendar.getInstance()
-                                .getTimeInMillis());
-                        workflowNodeDetail.setWorkflowNodeStatus(workflowNodeStatus);
-                        experimentCatalog.update(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL, workflowNodeDetail,
-                                workflowNodeId);
-                    }
-                    List<Object> taskDetailList = experimentCatalog.get(
-                            ExperimentCatalogModelType.TASK_DETAIL,
-                            TaskDetailConstants.NODE_ID, workflowNodeId);
-                    for (Object o : taskDetailList) {
-                        TaskDetails taskDetails = (TaskDetails) o;
-                        TaskStatus taskStatus = ((TaskDetails) o).getTaskStatus();
-                        if (taskStatus.getExecutionState().getValue() > 7 && taskStatus.getExecutionState().getValue()<12) {
-                            log.error(((TaskDetails) o).getTaskID() + " Task status cannot mark as cancelled, because " +
-                                    "current task state is " + ((TaskDetails) o).getTaskStatus().getExecutionState().toString());
-                            continue;// this continue is very useful not to process deeper loops if the upper layers have non-cancel states
-                        } else {
-                            taskStatus.setExecutionState(TaskState.CANCELING);
-                            taskStatus.setTimeOfStateChange(Calendar.getInstance()
-                                    .getTimeInMillis());
-                            taskDetails.setTaskStatus(taskStatus);
-                            experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, o,
-                                    taskDetails.getTaskID());
-                        }
-                        orchestrator.cancelExperiment(experiment,
-                                workflowNodeDetail, taskDetails, tokenId);
-                        // Status update should be done at the monitor
-                    }
-                }
-            }else {
-                if (isCancelAllowed(experimentState)){
-                    // when experiment status is < 3 no jobDetails object is created,
-                    // so we don't have to worry, we simply have to change the status and stop the execution
-                    ExperimentStatus status = new ExperimentStatus();
-                    status.setExperimentState(ExperimentState.CANCELED);
-                    status.setTimeOfStateChange(Calendar.getInstance()
-                            .getTimeInMillis());
-                    experiment.setExperimentStatus(status);
-                    experimentCatalog.update(ExperimentCatalogModelType.EXPERIMENT, experiment,
-                            experimentId);
-                    List<String> ids = experimentCatalog.getIds(
-                            ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
-                            WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
-                    for (String workflowNodeId : ids) {
-                        WorkflowNodeDetails workflowNodeDetail = (WorkflowNodeDetails) experimentCatalog
-                                .get(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
-                                        workflowNodeId);
-                        WorkflowNodeStatus workflowNodeStatus = new WorkflowNodeStatus();
-                        workflowNodeStatus.setWorkflowNodeState(WorkflowNodeState.CANCELED);
-                        workflowNodeStatus.setTimeOfStateChange(Calendar.getInstance()
-                                .getTimeInMillis());
-                        workflowNodeDetail.setWorkflowNodeStatus(workflowNodeStatus);
-                        experimentCatalog.update(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL, workflowNodeDetail,
-                                workflowNodeId);
-                        List<Object> taskDetailList = experimentCatalog.get(
-                                ExperimentCatalogModelType.TASK_DETAIL,
-                                TaskDetailConstants.NODE_ID, workflowNodeId);
-                        for (Object o : taskDetailList) {
-                            TaskDetails taskDetails = (TaskDetails) o;
-                            TaskStatus taskStatus = ((TaskDetails) o).getTaskStatus();
-                            taskStatus.setExecutionState(TaskState.CANCELED);
-                            taskStatus.setTimeOfStateChange(Calendar.getInstance()
-                                    .getTimeInMillis());
-                            taskDetails.setTaskStatus(taskStatus);
-                            experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, o,
-                                    taskDetails);
-                        }
-                    }
-                }else {
-                    log.errorId(experimentId, "Unable to mark experiment as Cancelled, current state {} doesn't allow to cancel the experiment {}.",
-                            experiment.getExperimentStatus().getExperimentState().toString(), experimentId);
-                    throw new OrchestratorException("Unable to mark experiment as Cancelled, because current state is: "
-                            + experiment.getExperimentStatus().getExperimentState().toString());
-                }
-            }
-            log.info("Experiment: " + experimentId + " is cancelled !!!!!");
-        } catch (Exception e) {
-            throw new TException(e);
-        }
+        // FIXME
+//        try {
+//            Experiment experiment = (Experiment) experimentCatalog.get(
+//                    ExperimentCatalogModelType.EXPERIMENT, experimentId);
+//			log.info("Waiting for zookeeper to connect to the server");
+//			synchronized (mutex){
+//				mutex.wait(5000);
+//			}
+//            if (experiment == null) {
+//                log.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {}.", experimentId);
+//                throw new OrchestratorException("Error retrieving the Experiment by the given experimentID: " + experimentId);
+//            }
+//            ExperimentState experimentState = experiment.getExperimentStatus().getExperimentState();
+//            if (isCancelValid(experimentState)){
+//                ExperimentStatus status = new ExperimentStatus();
+//                status.setExperimentState(ExperimentState.CANCELING);
+//                status.setTimeOfStateChange(Calendar.getInstance()
+//                        .getTimeInMillis());
+//                experiment.setExperimentStatus(status);
+//                experimentCatalog.update(ExperimentCatalogModelType.EXPERIMENT, experiment,
+//                        experimentId);
+//
+//                List<String> ids = experimentCatalog.getIds(
+//                        ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
+//                        WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
+//                for (String workflowNodeId : ids) {
+//                    WorkflowNodeDetails workflowNodeDetail = (WorkflowNodeDetails) experimentCatalog
+//                            .get(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
+//                                    workflowNodeId);
+//                    int value = workflowNodeDetail.getWorkflowNodeStatus().getWorkflowNodeState().getValue();
+//                    if ( value> 1 && value < 7) { // we skip the unknown state
+//                        log.error(workflowNodeDetail.getNodeName() + " Workflow Node status cannot mark as cancelled, because " +
+//                                "current status is " + workflowNodeDetail.getWorkflowNodeStatus().getWorkflowNodeState().toString());
+//                        continue; // skip the deeper loops when the upper layers are not in a cancellable state
+//                    } else {
+//                        WorkflowNodeStatus workflowNodeStatus = new WorkflowNodeStatus();
+//                        workflowNodeStatus.setWorkflowNodeState(WorkflowNodeState.CANCELING);
+//                        workflowNodeStatus.setTimeOfStateChange(Calendar.getInstance()
+//                                .getTimeInMillis());
+//                        workflowNodeDetail.setWorkflowNodeStatus(workflowNodeStatus);
+//                        experimentCatalog.update(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL, workflowNodeDetail,
+//                                workflowNodeId);
+//                    }
+//                    List<Object> taskDetailList = experimentCatalog.get(
+//                            ExperimentCatalogModelType.TASK_DETAIL,
+//                            TaskDetailConstants.NODE_ID, workflowNodeId);
+//                    for (Object o : taskDetailList) {
+//                        TaskDetails taskDetails = (TaskDetails) o;
+//                        TaskStatus taskStatus = ((TaskDetails) o).getTaskStatus();
+//                        if (taskStatus.getExecutionState().getValue() > 7 && taskStatus.getExecutionState().getValue()<12) {
+//                            log.error(((TaskDetails) o).getTaskID() + " Task status cannot mark as cancelled, because " +
+//                                    "current task state is " + ((TaskDetails) o).getTaskStatus().getExecutionState().toString());
+//                            continue; // skip the deeper loops when the upper layers are not in a cancellable state
+//                        } else {
+//                            taskStatus.setExecutionState(TaskState.CANCELING);
+//                            taskStatus.setTimeOfStateChange(Calendar.getInstance()
+//                                    .getTimeInMillis());
+//                            taskDetails.setTaskStatus(taskStatus);
+//                            experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, o,
+//                                    taskDetails.getTaskID());
+//                        }
+//                        orchestrator.cancelExperiment(experiment,
+//                                workflowNodeDetail, taskDetails, tokenId);
+//                        // Status update should be done at the monitor
+//                    }
+//                }
+//            }else {
+//                if (isCancelAllowed(experimentState)){
+//                    // when the experiment status is < 3, no jobDetails object has been created yet,
+//                    // so we only need to change the status and stop the execution
+//                    ExperimentStatus status = new ExperimentStatus();
+//                    status.setExperimentState(ExperimentState.CANCELED);
+//                    status.setTimeOfStateChange(Calendar.getInstance()
+//                            .getTimeInMillis());
+//                    experiment.setExperimentStatus(status);
+//                    experimentCatalog.update(ExperimentCatalogModelType.EXPERIMENT, experiment,
+//                            experimentId);
+//                    List<String> ids = experimentCatalog.getIds(
+//                            ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
+//                            WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
+//                    for (String workflowNodeId : ids) {
+//                        WorkflowNodeDetails workflowNodeDetail = (WorkflowNodeDetails) experimentCatalog
+//                                .get(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL,
+//                                        workflowNodeId);
+//                        WorkflowNodeStatus workflowNodeStatus = new WorkflowNodeStatus();
+//                        workflowNodeStatus.setWorkflowNodeState(WorkflowNodeState.CANCELED);
+//                        workflowNodeStatus.setTimeOfStateChange(Calendar.getInstance()
+//                                .getTimeInMillis());
+//                        workflowNodeDetail.setWorkflowNodeStatus(workflowNodeStatus);
+//                        experimentCatalog.update(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL, workflowNodeDetail,
+//                                workflowNodeId);
+//                        List<Object> taskDetailList = experimentCatalog.get(
+//                                ExperimentCatalogModelType.TASK_DETAIL,
+//                                TaskDetailConstants.NODE_ID, workflowNodeId);
+//                        for (Object o : taskDetailList) {
+//                            TaskDetails taskDetails = (TaskDetails) o;
+//                            TaskStatus taskStatus = ((TaskDetails) o).getTaskStatus();
+//                            taskStatus.setExecutionState(TaskState.CANCELED);
+//                            taskStatus.setTimeOfStateChange(Calendar.getInstance()
+//                                    .getTimeInMillis());
+//                            taskDetails.setTaskStatus(taskStatus);
+//                            experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, o,
+//                                    taskDetails);
+//                        }
+//                    }
+//                }else {
+//                    log.errorId(experimentId, "Unable to mark experiment as Cancelled, current state {} doesn't allow to cancel the experiment {}.",
+//                            experiment.getExperimentStatus().getExperimentState().toString(), experimentId);
+//                    throw new OrchestratorException("Unable to mark experiment as Cancelled, because current state is: "
+//                            + experiment.getExperimentStatus().getExperimentState().toString());
+//                }
+//            }
+//            log.info("Experiment: " + experimentId + " is cancelled !!!!!");
+//        } catch (Exception e) {
+//            throw new TException(e);
+//        }
         return true;
     }
 
-    private boolean isCancelValid(ExperimentState state){
-        switch (state) {
-            case LAUNCHED:
-            case EXECUTING:
-            case CANCELING:
-                return true;
-            default:
-                return false;
-        }
-    }
-
-    private boolean isCancelAllowed(ExperimentState state){
-        switch (state) {
-            case CREATED:
-            case VALIDATED:
-            case SCHEDULED:
-                return true;
-            default:
-                return false;
-        }
-    }
-
     private void launchWorkflowExperiment(String experimentId, String airavataCredStoreToken) throws TException {
-        try {
-            WorkflowEnactmentService.getInstance().
-                    submitWorkflow(experimentId, airavataCredStoreToken, getGatewayName(), getRabbitMQProcessPublisher());
-        } catch (Exception e) {
-            log.error("Error while launching workflow", e);
-        }
+        // FIXME
+//        try {
+//            WorkflowEnactmentService.getInstance().
+//                    submitWorkflow(experimentId, airavataCredStoreToken, getGatewayName(), getRabbitMQProcessPublisher());
+//        } catch (Exception e) {
+//            log.error("Error while launching workflow", e);
+//        }
     }
 
     public synchronized RabbitMQProcessPublisher getRabbitMQProcessPublisher() throws Exception {
@@ -543,64 +510,65 @@ public class OrchestratorServerHandler implements OrchestratorService.Iface {
             }
         }
 
+        // FIXME
         private boolean launchSingleAppExperiment() throws TException {
-            Experiment experiment = null;
-            try {
-                List<String> ids = experimentCatalog.getIds(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL, WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
-                for (String workflowNodeId : ids) {
-//                WorkflowNodeDetails workflowNodeDetail = (WorkflowNodeDetails) registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, workflowNodeId);
-                    List<Object> taskDetailList = experimentCatalog.get(ExperimentCatalogModelType.TASK_DETAIL, TaskDetailConstants.NODE_ID, workflowNodeId);
-                    for (Object o : taskDetailList) {
-                        TaskDetails taskData = (TaskDetails) o;
-                        // iterate through all the generated tasks and perform the job submission + monitoring
-                        experiment = (Experiment) experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT, experimentId);
-                        if (experiment == null) {
-                            log.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {}", experimentId);
-                            return false;
-                        }
-                        String gatewayId = null;
-                        CredentialReader credentialReader = GFacUtils.getCredentialReader();
-                        if (credentialReader != null) {
-                            try {
-                                gatewayId = credentialReader.getGatewayID(airavataCredStoreToken);
-                            } catch (Exception e) {
-                                log.error(e.getLocalizedMessage());
-                            }
-                        }
-                        if (gatewayId == null || gatewayId.isEmpty()) {
-                            gatewayId = ServerSettings.getDefaultUserGateway();
-                        }
-                        ExperimentStatusChangeEvent event = new ExperimentStatusChangeEvent(ExperimentState.LAUNCHED,
-                                experimentId,
-                                gatewayId);
-                        String messageId = AiravataUtils.getId("EXPERIMENT");
-                        MessageContext messageContext = new MessageContext(event, MessageType.EXPERIMENT, messageId, gatewayId);
-                        messageContext.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
-                        publisher.publish(messageContext);
-                        experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, taskData, taskData.getTaskID());
-                        //launching the experiment
-                        launchTask(taskData.getTaskID(), airavataCredStoreToken);
-                    }
-                }
-
-            } catch (Exception e) {
-                // There is not much to do here, because the only likely failure is inside gfac,
-                // and gfac handles the experiment/task/job statuses itself. We might also fail on
-                // registry access before the jobs are submitted to gfac; in that case we simply
-                // leave those statuses as created.
-                ExperimentStatus status = new ExperimentStatus();
-                status.setExperimentState(ExperimentState.FAILED);
-                status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
-                experiment.setExperimentStatus(status);
-                try {
-                    experimentCatalog.update(ExperimentCatalogModelType.EXPERIMENT_STATUS, status, experimentId);
-                } catch (RegistryException e1) {
-                    log.errorId(experimentId, "Error while updating experiment status to " + status.toString(), e);
-                    throw new TException(e);
-                }
-                log.errorId(experimentId, "Error while updating task status, hence updated experiment status to " + status.toString(), e);
-                throw new TException(e);
-            }
+//            ExperimentModel experiment = null;
+//            try {
+//                List<String> ids = experimentCatalog.getIds(ExperimentCatalogModelType.WORKFLOW_NODE_DETAIL, WorkflowNodeConstants.EXPERIMENT_ID, experimentId);
+//                for (String workflowNodeId : ids) {
+////                WorkflowNodeDetails workflowNodeDetail = (WorkflowNodeDetails) registry.get(RegistryModelType.WORKFLOW_NODE_DETAIL, workflowNodeId);
+//                    List<Object> taskDetailList = experimentCatalog.get(ExperimentCatalogModelType.TASK_DETAIL, TaskDetailConstants.NODE_ID, workflowNodeId);
+//                    for (Object o : taskDetailList) {
+//                        TaskDetails taskData = (TaskDetails) o;
+//                        // iterate through all the generated tasks and perform the job submission + monitoring
+//                        experiment = (Experiment) experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT, experimentId);
+//                        if (experiment == null) {
+//                            log.errorId(experimentId, "Error retrieving the Experiment by the given experimentID: {}", experimentId);
+//                            return false;
+//                        }
+//                        String gatewayId = null;
+//                        CredentialReader credentialReader = GFacUtils.getCredentialReader();
+//                        if (credentialReader != null) {
+//                            try {
+//                                gatewayId = credentialReader.getGatewayID(airavataCredStoreToken);
+//                            } catch (Exception e) {
+//                                log.error(e.getLocalizedMessage());
+//                            }
+//                        }
+//                        if (gatewayId == null || gatewayId.isEmpty()) {
+//                            gatewayId = ServerSettings.getDefaultUserGateway();
+//                        }
+//                        ExperimentStatusChangeEvent event = new ExperimentStatusChangeEvent(ExperimentState.LAUNCHED,
+//                                experimentId,
+//                                gatewayId);
+//                        String messageId = AiravataUtils.getId("EXPERIMENT");
+//                        MessageContext messageContext = new MessageContext(event, MessageType.EXPERIMENT, messageId, gatewayId);
+//                        messageContext.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
+//                        publisher.publish(messageContext);
+//                        experimentCatalog.update(ExperimentCatalogModelType.TASK_DETAIL, taskData, taskData.getTaskID());
+//                        //launching the experiment
+//                        launchTask(taskData.getTaskID(), airavataCredStoreToken);
+//                    }
+//                }
+//
+//            } catch (Exception e) {
+//                // There is not much to do here, because the only likely failure is inside gfac,
+//                // and gfac handles the experiment/task/job statuses itself. We might also fail on
+//                // registry access before the jobs are submitted to gfac; in that case we simply
+//                // leave those statuses as created.
+//                ExperimentStatus status = new ExperimentStatus();
+//                status.setExperimentState(ExperimentState.FAILED);
+//                status.setTimeOfStateChange(Calendar.getInstance().getTimeInMillis());
+//                experiment.setExperimentStatus(status);
+//                try {
+//                    experimentCatalog.update(ExperimentCatalogModelType.EXPERIMENT_STATUS, status, experimentId);
+//                } catch (RegistryException e1) {
+//                    log.errorId(experimentId, "Error while updating experiment status to " + status.toString(), e);
+//                    throw new TException(e);
+//                }
+//                log.errorId(experimentId, "Error while updating task status, hence updated experiment status to " + status.toString(), e);
+//                throw new TException(e);
+//            }
             return true;
         }
     }
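
For reference while the cancel path above sits behind a FIXME, the state guards implemented by the deleted isCancelValid/isCancelAllowed helpers can be sketched as follows. The ExperimentState constants are the ones used in the removed code; keeping the helpers as private static methods is simply one way to retain them.

    // Sketch of the cancel-state guards removed here; the ExperimentState constants
    // come from the deleted isCancelValid/isCancelAllowed helpers.
    private static boolean isCancelValid(ExperimentState state) {
        switch (state) {
            case LAUNCHED:
            case EXECUTING:
            case CANCELING:
                return true;  // a running experiment may be moved to CANCELING
            default:
                return false;
        }
    }

    private static boolean isCancelAllowed(ExperimentState state) {
        switch (state) {
            case CREATED:
            case VALIDATED:
            case SCHEDULED:
                return true;  // nothing has been submitted yet, so it can be marked CANCELED directly
            default:
                return false;
        }
    }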

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/DataModelUtils.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/DataModelUtils.java b/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/DataModelUtils.java
index bbe1dd4..f9f8115 100644
--- a/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/DataModelUtils.java
+++ b/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/DataModelUtils.java
@@ -34,11 +34,11 @@ import org.slf4j.LoggerFactory;
 public class DataModelUtils {
 
     private final static Logger logger = LoggerFactory.getLogger(DataModelUtils.class);
-	public static ExecutionType getExecutionType(String gatewayId, Experiment experiment){
+	public static ExecutionType getExecutionType(String gatewayId, ExperimentModel experiment){
 		try {
 			ApplicationInterface applicationInterface = RegistryFactory.getAppCatalog().getApplicationInterface();
 			List<String> allApplicationInterfaceIds = applicationInterface.getAllApplicationInterfaceIds();
-			String applicationId = experiment.getApplicationId();
+			String applicationId = experiment.getExecutionId();
 			if (allApplicationInterfaceIds.contains(applicationId)){
 				return ExecutionType.SINGLE_APP;
 			} else {
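
With the switch to ExperimentModel, the application interface id is read through getExecutionId() rather than getApplicationId(). A minimal usage sketch, assuming the Thrift-generated setExecutionId() setter and an existing gatewayId variable; only getExecutionType and ExecutionType.SINGLE_APP come from this file, and the interface id is illustrative:

    // Hypothetical caller of DataModelUtils after the Experiment -> ExperimentModel change.
    ExperimentModel experiment = new ExperimentModel();
    experiment.setExecutionId("Echo_Interface_Id");  // was setApplicationId() on the old Experiment bean
    ExecutionType type = DataModelUtils.getExecutionType(gatewayId, experiment);
    if (type == ExecutionType.SINGLE_APP) {
        // route to the single-application launch path
    } else {
        // otherwise treat it as a workflow experiment
    }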

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/OrchestratorServerThreadPoolExecutor.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/OrchestratorServerThreadPoolExecutor.java b/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/OrchestratorServerThreadPoolExecutor.java
index 1730998..3fdba74 100644
--- a/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/OrchestratorServerThreadPoolExecutor.java
+++ b/modules/orchestrator/orchestrator-service/src/main/java/org/apache/airavata/orchestrator/util/OrchestratorServerThreadPoolExecutor.java
@@ -26,12 +26,12 @@ import java.util.concurrent.Executors;
 
 
 import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.logger.AiravataLogger;
-import org.apache.airavata.common.logger.AiravataLoggerFactory;
 import org.apache.airavata.common.utils.ServerSettings;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class OrchestratorServerThreadPoolExecutor {
-	    private final static AiravataLogger logger = AiravataLoggerFactory.getLogger(OrchestratorServerThreadPoolExecutor.class);
+	    private final static Logger logger = LoggerFactory.getLogger(OrchestratorServerThreadPoolExecutor.class);
 	    public static final String AIRAVATA_SERVER_THREAD_POOL_SIZE = "airavata.server.thread.pool.size";
 
 	    private static ExecutorService threadPool;
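
The same logging change is applied here as elsewhere in the commit: the project-specific AiravataLogger/AiravataLoggerFactory pair is replaced with plain slf4j. A self-contained sketch of the resulting pattern, assuming a fixed pool size of 10 in place of the value the real class reads for airavata.server.thread.pool.size:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThreadPoolSketch {
        // slf4j replaces AiravataLogger, as in OrchestratorServerThreadPoolExecutor above
        private static final Logger logger = LoggerFactory.getLogger(ThreadPoolSketch.class);
        private static ExecutorService threadPool;

        public static synchronized ExecutorService getThreadPool() {
            if (threadPool == null) {
                logger.info("Creating the orchestrator thread pool");
                threadPool = Executors.newFixedThreadPool(10); // the real class reads the size from server settings
            }
            return threadPool;
        }
    }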

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java
----------------------------------------------------------------------
diff --git a/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java b/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java
index ff7d20e..d37d322 100644
--- a/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java
+++ b/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java
@@ -31,10 +31,12 @@ import org.apache.airavata.common.utils.XMLUtil;
 import org.apache.airavata.common.utils.listener.AbstractActivityListener;
 import org.apache.airavata.messaging.core.MessageContext;
 import org.apache.airavata.messaging.core.Publisher;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+import org.apache.airavata.model.application.io.OutputDataObjectType;
 import org.apache.airavata.model.messaging.event.*;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.status.ExperimentState;
+import org.apache.airavata.model.status.ProcessState;
+import org.apache.airavata.model.status.ProcessStatus;
 import org.apache.airavata.model.util.ExperimentModelUtil;
 import org.apache.airavata.model.experiment.*;
 import org.apache.airavata.orchestrator.cpi.OrchestratorService;
@@ -101,9 +103,9 @@ public class WorkflowInterpreter implements AbstractActivityListener{
 	
 	private WorkflowInterpreterInteractor interactor;
 
-	private Map<Node,WorkflowNodeDetails> nodeInstanceList;
+	private Map<Node,ProcessModel> nodeInstanceList;
 
-	private Experiment experiment;
+	private ExperimentModel experiment;
 	private ExperimentCatalog experimentCatalog;
 
     public void setGatewayId(String gatewayId) {
@@ -131,7 +133,7 @@ public class WorkflowInterpreter implements AbstractActivityListener{
      * @param config
      * @param orchestratorClient
      */
-	public WorkflowInterpreter(Experiment experiment, String credentialStoreToken,
+	public WorkflowInterpreter(ExperimentModel experiment, String credentialStoreToken,
                                WorkflowInterpreterConfiguration config, OrchestratorService.Client orchestratorClient, Publisher publisher) {
 		this.setConfig(config);
 		this.setExperiment(experiment);
@@ -363,18 +365,18 @@ public class WorkflowInterpreter implements AbstractActivityListener{
 		} finally{
         	cleanup();
 			this.getWorkflow().setExecutionState(WorkflowExecutionState.NONE);
-            ExperimentStatusChangeEvent event = new ExperimentStatusChangeEvent(ExperimentState.COMPLETED, experiment.getExperimentID(), gatewayId);
+            ExperimentStatusChangeEvent event = new ExperimentStatusChangeEvent(ExperimentState.COMPLETED, experiment.getExperimentId(), gatewayId);
             MessageContext msgCtx = new MessageContext(event, MessageType.EXPERIMENT, AiravataUtils.getId("EXPERIMENT"), gatewayId);
             msgCtx.setUpdatedTime(new Timestamp(Calendar.getInstance().getTimeInMillis()));
             publisher.publish(msgCtx);
         }
     }
 
-    private void publishNodeStatusChange(WorkflowNodeState state, String nodeId , String expId)
+    private void publishNodeStatusChange(ProcessState state, String nodeId , String expId)
             throws AiravataException {
         if (publisher != null) {
-            MessageContext msgCtx = new MessageContext(new WorkflowNodeStatusChangeEvent(state, new WorkflowIdentifier(nodeId,
-                    expId, gatewayId)), MessageType.WORKFLOWNODE, AiravataUtils.getId("NODE"), gatewayId);
+            MessageContext msgCtx = new MessageContext(new ProcessStatusChangeEvent(state, new ProcessIdentifier(nodeId,
+                    expId, gatewayId)), MessageType.PROCESS, AiravataUtils.getId("NODE"), gatewayId);
             msgCtx.setUpdatedTime(new Timestamp(Calendar.getInstance().getTimeInMillis()));
             publisher.publish(msgCtx);
         } else {
@@ -1399,11 +1401,11 @@ public class WorkflowInterpreter implements AbstractActivityListener{
         return workflowInterpreterConfigurationThreadLocal.get();
     }
 
-	public Experiment getExperiment() {
+	public ExperimentModel getExperiment() {
 		return experiment;
 	}
 
-	public void setExperiment(Experiment experiment) {
+	public void setExperiment(ExperimentModel experiment) {
 		this.experiment = experiment;
 	}
 
@@ -1424,7 +1426,7 @@ public class WorkflowInterpreter implements AbstractActivityListener{
     public void taskOutputChanged(TaskOutputChangeEvent taskOutputEvent){
 		String taskId = taskOutputEvent.getTaskIdentity().getTaskId();
 		if (isTaskAwaiting(taskId)){
-        	WorkflowNodeState state=WorkflowNodeState.COMPLETED;
+        	ProcessState state=ProcessState.COMPLETED;
 			Node node = getAwaitingNodeForTask(taskId);
     		List<OutputDataObjectType> applicationOutputs = taskOutputEvent.getOutput();
 			Map<String, String> outputData = new HashMap<String, String>();
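
Pulling the + lines of this hunk together, the node-status publisher now emits a ProcessStatusChangeEvent instead of a WorkflowNodeStatusChangeEvent. A sketch of the reworked method; the event and identifier arguments are taken from the diff, while the publisher-is-null branch and the log field name are assumed:

    private void publishNodeStatusChange(ProcessState state, String nodeId, String expId)
            throws AiravataException {
        if (publisher != null) {
            // wrap the process status change in a MessageContext and publish it, as in the + lines above
            MessageContext msgCtx = new MessageContext(
                    new ProcessStatusChangeEvent(state, new ProcessIdentifier(nodeId, expId, gatewayId)),
                    MessageType.PROCESS, AiravataUtils.getId("NODE"), gatewayId);
            msgCtx.setUpdatedTime(new Timestamp(Calendar.getInstance().getTimeInMillis()));
            publisher.publish(msgCtx);
        } else {
            // assumed fallback: the original else branch is not shown in this hunk
            log.warn("Publisher is null; dropping status change for node " + nodeId);
        }
    }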

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/invoker/WorkflowInputUtil.java
----------------------------------------------------------------------
diff --git a/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/invoker/WorkflowInputUtil.java b/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/invoker/WorkflowInputUtil.java
index c1a048b..0988ad8 100644
--- a/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/invoker/WorkflowInputUtil.java
+++ b/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/invoker/WorkflowInputUtil.java
@@ -21,7 +21,7 @@
 package org.apache.airavata.workflow.engine.invoker;
 
 import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.application.io.DataType;
 import org.apache.airavata.workflow.model.component.ws.WSComponentPort;
 
 public class WorkflowInputUtil {
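
These DataType changes are all the same one-line move of the enum from the appcatalog.appinterface package to application.io; the remaining hunks below repeat it. A small sketch of code written against the new location, where the STRING constant is assumed from the Airavata data models:

    // before: import org.apache.airavata.model.appcatalog.appinterface.DataType;
    import org.apache.airavata.model.application.io.DataType;

    public class DataTypeSketch {
        // illustrative helper: quote string-typed workflow input values before embedding them in a message
        static String quoteIfString(DataType type, String value) {
            return type == DataType.STRING ? "\"" + value + "\"" : value;
        }
    }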

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/ComponentDataPort.java
----------------------------------------------------------------------
diff --git a/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/ComponentDataPort.java b/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/ComponentDataPort.java
index 198a3e2..f296e62 100644
--- a/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/ComponentDataPort.java
+++ b/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/ComponentDataPort.java
@@ -23,7 +23,7 @@ package org.apache.airavata.workflow.model.component;
 
 import javax.xml.namespace.QName;
 
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.application.io.DataType;
 import org.apache.airavata.workflow.model.graph.DataPort;
 
 public abstract class ComponentDataPort extends ComponentPort {

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/amazon/InstanceComponentDataPort.java
----------------------------------------------------------------------
diff --git a/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/amazon/InstanceComponentDataPort.java b/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/amazon/InstanceComponentDataPort.java
index 9702658..0d60906 100644
--- a/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/amazon/InstanceComponentDataPort.java
+++ b/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/amazon/InstanceComponentDataPort.java
@@ -22,7 +22,7 @@
 package org.apache.airavata.workflow.model.component.amazon;
 
 import org.apache.airavata.common.utils.WSConstants;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.application.io.DataType;
 import org.apache.airavata.workflow.model.component.ComponentDataPort;
 import org.apache.airavata.workflow.model.graph.DataPort;
 import org.apache.airavata.workflow.model.graph.amazon.InstanceDataPort;

http://git-wip-us.apache.org/repos/asf/airavata/blob/ab322333/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/dynamic/DynamicComponentPort.java
----------------------------------------------------------------------
diff --git a/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/dynamic/DynamicComponentPort.java b/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/dynamic/DynamicComponentPort.java
index f5fa500..a2fda0b 100644
--- a/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/dynamic/DynamicComponentPort.java
+++ b/modules/workflow-model/workflow-model-core/src/main/java/org/apache/airavata/workflow/model/component/dynamic/DynamicComponentPort.java
@@ -21,7 +21,7 @@
 
 package org.apache.airavata.workflow.model.component.dynamic;
 
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.application.io.DataType;
 import org.apache.airavata.workflow.model.component.ComponentDataPort;
 import org.apache.airavata.workflow.model.graph.DataPort;
 import org.apache.airavata.workflow.model.graph.dynamic.DynamicPort;