Posted to commits@airavata.apache.org by di...@apache.org on 2018/03/07 21:09:57 UTC

[airavata] branch helix-integration updated (8632150 -> 71b294e)

This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a change to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git.


    from 8632150  changing dev email address to devjobs
     new c9a1b06  Initial helix migration
     new ef94a5a  Building groovy map
     new b199bc2  Stabilizing DefaultJobSubmission Task
     new cb54e4d  Fixing env setup task
     new 7350b25  Implementing DataStaging tasks
     new 573dbab  Fixing bugs in pre workflow
     new 4e1c1b0  Standalone email monitor initial implementation
     new 42ff5f4  Implementing post workflow
     new ca45564  Configuring pre workflow manager to read from rabbitmq launch queue
     new 1c3a5d4  Improving status publishing
     new f51f1f1  Thread safe entity manager factory
     new 71075e0  Logging improvements
     new 782b0e8  Refactoring
     new e864db3  Adding deployment module to helix workflows
     new a726a98  Moving helix-spectator module to airavata-helix module
     new 0053413  Moving helix-spectator module to airavata-helix module
     new 71b294e  Refactoring

The 17 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 modules/airavata-helix-distribution/pom.xml        |  55 ++
 .../src/main/assembly/bin-assembly.xml             | 111 +++
 .../resources/bin/airavata-controller-start.sh     |  99 +++
 .../main/resources/bin/airavata-controller-stop.sh |  71 ++
 .../main/resources/bin/airavata-monitor-start.sh   |  99 +++
 .../resources/bin/airavata-participant-start.sh    |  99 +++
 .../resources/bin/airavata-participant-stop.sh     |  71 ++
 .../main/resources/bin/airavata-post-wm-start.sh   |  99 +++
 .../main/resources/bin/airavata-post-wm-stop.sh    |  71 ++
 .../main/resources/bin/airavata-pre-wm-start.sh    |  99 +++
 .../src/main/resources/bin/airavata-pre-wm-stop.sh |  71 ++
 .../src/main/resources/bin/setenv.sh               |   0
 .../main/resources/conf/airavata-server.properties | 345 ++++++++
 .../src/main/resources/conf/application.properties |   3 +
 .../src/main/resources/conf/cred_store.jks         | Bin 0 -> 499 bytes
 .../src/main/resources/conf/log4j.properties       |  13 +
 modules/airavata-helix/agent-api/pom.xml           |  47 ++
 .../apache/airavata/agents/api/AdaptorParams.java  |  26 +
 .../java/org/apache/airavata/agents/api/Agent.java |  10 +
 .../apache/airavata/agents/api/AgentAdaptor.java   |  27 +
 .../apache/airavata/agents/api/AgentException.java |  30 +
 .../org/apache/airavata/agents/api/AgentStore.java | 103 +++
 .../apache/airavata/agents/api/CommandOutput.java  |  16 +
 .../airavata/agents/api/JobSubmissionOutput.java   |  83 ++
 .../agents/api/StorageResourceAdaptor.java         |   7 +
 modules/airavata-helix/agent-impl/pom.xml          |  27 +
 .../airavata-helix/agent-impl/ssh-agent/pom.xml    |  73 ++
 .../helix/agent/local/LocalAgentAdaptor.java       |  52 ++
 .../airavata/helix/agent/ssh/SshAdaptorParams.java | 116 +++
 .../airavata/helix/agent/ssh/SshAgentAdaptor.java  | 576 ++++++++++++++
 .../helix/agent/ssh/StandardOutReader.java         |  51 ++
 .../agent/storage/StorageResourceAdaptorImpl.java  |  85 ++
 modules/airavata-helix/helix-spectator/pom.xml     |  69 ++
 .../helix/impl/controller/HelixController.java     |  91 +++
 .../helix/impl/participant/GlobalParticipant.java  |  77 ++
 .../airavata/helix/impl/task/AiravataTask.java     | 335 ++++++++
 .../airavata/helix/impl/task/TaskContext.java      | 883 +++++++++++++++++++++
 .../helix/impl/task/TaskOnFailException.java       |  27 +
 .../helix/impl/task/completing/CompletingTask.java |  29 +
 .../airavata/helix/impl/task/env/EnvSetupTask.java |  43 +
 .../helix/impl/task/staging/DataStagingTask.java   |  97 +++
 .../impl/task/staging/InputDataStagingTask.java    | 114 +++
 .../impl/task/staging/OutputDataStagingTask.java   | 202 +++++
 .../task/submission/DefaultJobSubmissionTask.java  | 200 +++++
 .../task/submission/ForkJobSubmissionTask.java     |  84 ++
 .../impl/task/submission/JobSubmissionTask.java    | 195 +++++
 .../task/submission/LocalJobSubmissionTask.java    |  87 ++
 .../task/submission/config/GroovyMapBuilder.java   | 406 ++++++++++
 .../impl/task/submission/config/GroovyMapData.java | 466 +++++++++++
 .../impl/task/submission/config/JobFactory.java    | 100 +++
 .../submission/config/JobManagerConfiguration.java |  29 +
 .../impl/task/submission/config/OutputParser.java  |  41 +
 .../task/submission/config/RawCommandInfo.java     |  22 +
 .../helix/impl/task/submission/config/Script.java  |  43 +
 .../impl/task/submission/config/ScriptTag.java     |  13 +
 .../task/submission/config/SubmissionUtil.java     |  10 +
 .../config/app/ForkJobConfiguration.java           | 113 +++
 .../impl/task/submission/config/app/JobUtil.java   |  58 ++
 .../submission/config/app/LSFJobConfiguration.java | 120 +++
 .../submission/config/app/PBSJobConfiguration.java | 122 +++
 .../config/app/SlurmJobConfiguration.java          | 117 +++
 .../submission/config/app/UGEJobConfiguration.java | 117 +++
 .../parser/AiravataCustomCommandOutputParser.java  |  56 ++
 .../config/app/parser/ForkOutputParser.java        |  58 ++
 .../config/app/parser/LSFOutputParser.java         | 132 +++
 .../config/app/parser/PBSOutputParser.java         | 142 ++++
 .../config/app/parser/SlurmOutputParser.java       | 137 ++++
 .../config/app/parser/UGEOutputParser.java         | 107 +++
 .../helix/impl/workflow/PostWorkflowManager.java   | 266 +++++++
 .../helix/impl/workflow/PreWorkflowManager.java    | 145 ++++
 .../src/main/resources/airavata-server.properties  | 270 +++++++
 .../src/main/resources/application.properties      |   3 +
 .../src/main/resources/log4j.properties            |  13 +
 modules/airavata-helix/pom.xml                     |  25 +
 modules/airavata-helix/task-api/pom.xml            |  41 +
 .../apache/airavata/helix/task/api/TaskHelper.java |  14 +
 .../helix/task/api/annotation/TaskDef.java         |  18 +
 .../helix/task/api/annotation/TaskOutPort.java     |  18 +
 .../helix/task/api/annotation/TaskParam.java       |  20 +
 .../helix/task/api/support/AdaptorSupport.java     |  19 +
 .../src/main/resources/application.properties      |   3 +
 .../task-api/src/main/resources/log4j.properties   |   9 +
 modules/airavata-helix/task-core/pom.xml           |  52 ++
 .../apache/airavata/helix/core/AbstractTask.java   | 118 +++
 .../org/apache/airavata/helix/core/OutPort.java    |  44 +
 .../helix/core/participant/HelixParticipant.java   | 172 ++++
 .../helix/core/support/AdaptorSupportImpl.java     |  46 ++
 .../helix/core/support/TaskHelperImpl.java         |  17 +
 .../airavata/helix/core/util/PropertyResolver.java |  44 +
 .../apache/airavata/helix/core/util/TaskUtil.java  | 106 +++
 modules/airavata-helix/workflow-impl/pom.xml       |  44 +
 .../airavata/helix/workflow/SimpleWorkflow.java    |  40 +
 .../airavata/helix/workflow/WorkflowManager.java   | 104 +++
 modules/job-monitor/pom.xml                        |  43 +
 .../airavata/job/monitor/EmailBasedMonitor.java    | 312 ++++++++
 .../monitor/kafka/JobStatusResultDeserializer.java |  34 +
 .../monitor/kafka/JobStatusResultSerializer.java   |  29 +
 .../job/monitor/kafka/MessageProducer.java         |  37 +
 .../monitor/parser/AiravataCustomMailParser.java   |  77 ++
 .../airavata/job/monitor/parser/EmailParser.java   |  34 +
 .../job/monitor/parser/JobStatusResult.java        |  63 ++
 .../job/monitor/parser/LSFEmailParser.java         |  78 ++
 .../job/monitor/parser/PBSEmailParser.java         | 105 +++
 .../job/monitor/parser/ResourceConfig.java         |  54 ++
 .../job/monitor/parser/SLURMEmailParser.java       |  83 ++
 .../job/monitor/parser/UGEEmailParser.java         | 109 +++
 .../src/main/resources/airavata-server.properties  | 334 ++++++++
 .../src/main/resources/email-config.yaml           |  20 +
 .../src/main/resources/log4j.properties            |   9 +
 .../core/app/catalog/util/AppCatalogJPAUtils.java  |  61 +-
 pom.xml                                            |   8 +-
 111 files changed, 10689 insertions(+), 29 deletions(-)
 create mode 100644 modules/airavata-helix-distribution/pom.xml
 create mode 100644 modules/airavata-helix-distribution/src/main/assembly/bin-assembly.xml
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-controller-start.sh
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-controller-stop.sh
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-monitor-start.sh
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-participant-start.sh
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-participant-stop.sh
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-post-wm-start.sh
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-post-wm-stop.sh
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-pre-wm-start.sh
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/bin/airavata-pre-wm-stop.sh
 copy modules/{distribution => airavata-helix-distribution}/src/main/resources/bin/setenv.sh (100%)
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/conf/airavata-server.properties
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/conf/application.properties
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/conf/cred_store.jks
 create mode 100644 modules/airavata-helix-distribution/src/main/resources/conf/log4j.properties
 create mode 100644 modules/airavata-helix/agent-api/pom.xml
 create mode 100644 modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AdaptorParams.java
 create mode 100644 modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/Agent.java
 create mode 100644 modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentAdaptor.java
 create mode 100644 modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentException.java
 create mode 100644 modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentStore.java
 create mode 100644 modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/CommandOutput.java
 create mode 100644 modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/JobSubmissionOutput.java
 create mode 100644 modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/StorageResourceAdaptor.java
 create mode 100644 modules/airavata-helix/agent-impl/pom.xml
 create mode 100644 modules/airavata-helix/agent-impl/ssh-agent/pom.xml
 create mode 100644 modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/local/LocalAgentAdaptor.java
 create mode 100644 modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAdaptorParams.java
 create mode 100644 modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
 create mode 100644 modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/StandardOutReader.java
 create mode 100644 modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/storage/StorageResourceAdaptorImpl.java
 create mode 100644 modules/airavata-helix/helix-spectator/pom.xml
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/completing/CompletingTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/LocalJobSubmissionTask.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapBuilder.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobManagerConfiguration.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/OutputParser.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/Script.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/ForkJobConfiguration.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/JobUtil.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/LSFJobConfiguration.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/PBSJobConfiguration.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/SlurmJobConfiguration.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/UGEJobConfiguration.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/AiravataCustomCommandOutputParser.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/ForkOutputParser.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/LSFOutputParser.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/PBSOutputParser.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/SlurmOutputParser.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/UGEOutputParser.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/resources/airavata-server.properties
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/resources/application.properties
 create mode 100644 modules/airavata-helix/helix-spectator/src/main/resources/log4j.properties
 create mode 100644 modules/airavata-helix/pom.xml
 create mode 100644 modules/airavata-helix/task-api/pom.xml
 create mode 100644 modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/TaskHelper.java
 create mode 100644 modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskDef.java
 create mode 100644 modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskOutPort.java
 create mode 100644 modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskParam.java
 create mode 100644 modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
 create mode 100644 modules/airavata-helix/task-api/src/main/resources/application.properties
 create mode 100644 modules/airavata-helix/task-api/src/main/resources/log4j.properties
 create mode 100644 modules/airavata-helix/task-core/pom.xml
 create mode 100644 modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/AbstractTask.java
 create mode 100644 modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/OutPort.java
 create mode 100644 modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
 create mode 100644 modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
 create mode 100644 modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/TaskHelperImpl.java
 create mode 100644 modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/PropertyResolver.java
 create mode 100644 modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/TaskUtil.java
 create mode 100644 modules/airavata-helix/workflow-impl/pom.xml
 create mode 100644 modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/SimpleWorkflow.java
 create mode 100644 modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
 create mode 100644 modules/job-monitor/pom.xml
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/EmailBasedMonitor.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/JobStatusResultDeserializer.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/JobStatusResultSerializer.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/MessageProducer.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/AiravataCustomMailParser.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/EmailParser.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/JobStatusResult.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/LSFEmailParser.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/PBSEmailParser.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/ResourceConfig.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/SLURMEmailParser.java
 create mode 100644 modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/UGEEmailParser.java
 create mode 100644 modules/job-monitor/src/main/resources/airavata-server.properties
 create mode 100644 modules/job-monitor/src/main/resources/email-config.yaml
 create mode 100644 modules/job-monitor/src/main/resources/log4j.properties

-- 
To stop receiving notification emails like this one, please contact
dimuthuupe@apache.org.

[airavata] 08/17: Implementing post workflow

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 42ff5f4e2fbdbb1421f686b702a5eb76918bb4d3
Author: dimuthu <di...@gmail.com>
AuthorDate: Sun Mar 4 13:21:55 2018 -0500

    Implementing post workflow
---
 modules/helix-spectator/pom.xml                    |  11 +-
 .../airavata/helix/impl/task/AiravataTask.java     |   2 +-
 .../helix/impl/task/OutputDataStagingTask.java     |  52 ++++-
 .../submission/task/DefaultJobSubmissionTask.java  |   3 +
 .../task/submission/task/JobSubmissionTask.java    |  32 +++
 .../helix/impl/workflow/PostWorkflowManager.java   | 256 +++++++++++++++++++++
 ...SimpleWorkflow.java => PreWorkflowManager.java} |   2 +-
 modules/job-monitor/pom.xml                        |   5 +
 .../airavata/job/monitor/EmailBasedMonitor.java    |   7 +-
 .../monitor/kafka/JobStatusResultDeserializer.java |  34 +++
 .../monitor/kafka/JobStatusResultSerializer.java   |  29 +++
 .../job/monitor/kafka/MessageProducer.java         |  36 +++
 12 files changed, 460 insertions(+), 9 deletions(-)

diff --git a/modules/helix-spectator/pom.xml b/modules/helix-spectator/pom.xml
index 36fb586..213f747 100644
--- a/modules/helix-spectator/pom.xml
+++ b/modules/helix-spectator/pom.xml
@@ -50,6 +50,15 @@
             <artifactId>groovy-templates</artifactId>
             <version>2.4.7</version>
         </dependency>
-
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>1.0.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>job-monitor</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
     </dependencies>
 </project>
\ No newline at end of file
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
index 26361d2..e15195d 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
@@ -79,7 +79,7 @@ public abstract class AiravataTask extends AbstractTask {
         super.init(manager, workflowName, jobName, taskName);
         try {
             appCatalog = RegistryFactory.getAppCatalog();
-            experimentCatalog = RegistryFactory.getDefaultExpCatalog();
+            experimentCatalog = RegistryFactory.getExperimentCatalog(getGatewayId());
             processModel = (ProcessModel) experimentCatalog.get(ExperimentCatalogModelType.PROCESS, processId);
 
             this.computeResourceDescription = getAppCatalog().getComputeResource().getComputeResource(getProcessModel()
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
index d2280d0..f33523c 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
@@ -28,6 +28,7 @@ public class OutputDataStagingTask extends DataStagingTask {
     @Override
     public TaskResult onRun(TaskHelper taskHelper) {
 
+        logger.info("Starting output data staging task " + getTaskId());
         try {
             // Get and validate data staging task model
             DataStagingTaskModel dataStagingTaskModel = getDataStagingTaskModel();
@@ -56,14 +57,37 @@ public class OutputDataStagingTask extends DataStagingTask {
             String sourceFileName;
             try {
                 sourceURI = new URI(dataStagingTaskModel.getSource());
-                destinationURI = new URI(dataStagingTaskModel.getDestination());
+                sourceFileName = sourceURI.getPath().substring(sourceURI.getPath().lastIndexOf(File.separator) + 1,
+                        sourceURI.getPath().length());
+
+                if (dataStagingTaskModel.getDestination().startsWith("dummy")) {
+                    String inputPath  = getTaskContext().getStorageFileSystemRootLocation();
+                    inputPath = (inputPath.endsWith(File.separator) ? inputPath : inputPath + File.separator);
+                    String experimentDataDir = getProcessModel().getExperimentDataDir();
+                    String filePath;
+                    if(experimentDataDir != null && !experimentDataDir.isEmpty()) {
+                        if(!experimentDataDir.endsWith(File.separator)){
+                            experimentDataDir += File.separator;
+                        }
+                        if (experimentDataDir.startsWith(File.separator)) {
+                            filePath = experimentDataDir + sourceFileName;
+                        } else {
+                            filePath = inputPath + experimentDataDir + sourceFileName;
+                        }
+                    } else {
+                        filePath = inputPath + getProcessId() + File.separator + sourceFileName;
+                    }
+
+                    destinationURI = new URI("file", getTaskContext().getStorageResourceLoginUserName(),
+                            storageResource.getHostName(), 22, filePath, null, null);
+
+                } else {
+                    destinationURI = new URI(dataStagingTaskModel.getDestination());
+                }
 
                 if (logger.isDebugEnabled()) {
                     logger.debug("Source file " + sourceURI.getPath() + ", destination uri " + destinationURI.getPath() + " for task " + getTaskId());
                 }
-
-                sourceFileName = sourceURI.getPath().substring(sourceURI.getPath().lastIndexOf(File.separator) + 1,
-                        sourceURI.getPath().length());
             } catch (URISyntaxException e) {
                 throw new TaskOnFailException("Failed to obtain source URI for output data staging task " + getTaskId(), true, e);
             }
@@ -164,6 +188,26 @@ public class OutputDataStagingTask extends DataStagingTask {
         }
     }
 
+    public URI getDestinationURIFromDummy(String hostName, String inputPath, String fileName) throws URISyntaxException {
+        String experimentDataDir = getProcessModel().getExperimentDataDir();
+        String filePath;
+        if(experimentDataDir != null && !experimentDataDir.isEmpty()) {
+            if(!experimentDataDir.endsWith(File.separator)){
+                experimentDataDir += File.separator;
+            }
+            if (experimentDataDir.startsWith(File.separator)) {
+                filePath = experimentDataDir + fileName;
+            } else {
+                filePath = inputPath + experimentDataDir + fileName;
+            }
+        } else {
+            filePath = inputPath + getProcessId() + File.separator + fileName;
+        }
+        //FIXME
+        return new URI("file", getTaskContext().getStorageResourceLoginUserName(), hostName, 22, filePath, null, null);
+
+    }
+
     @Override
     public void onCancel() {
 
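Note on the destination handling added above: when the task model's destination starts with "dummy", the task derives the real destination from the storage resource instead. An absolute experimentDataDir is used as-is, a relative one is nested under the storage root, and if no experimentDataDir is set the process id becomes the directory. A minimal standalone sketch of that rule (resolveOutputPath is a hypothetical helper using plain "/" separators, not part of the commit):

    // Sketch only: mirrors the path-resolution logic in the hunk above.
    static String resolveOutputPath(String storageRoot, String experimentDataDir,
                                    String processId, String fileName) {
        String root = storageRoot.endsWith("/") ? storageRoot : storageRoot + "/";
        if (experimentDataDir != null && !experimentDataDir.isEmpty()) {
            String dir = experimentDataDir.endsWith("/") ? experimentDataDir : experimentDataDir + "/";
            // An absolute experiment data dir wins; a relative one is nested under the storage root.
            return dir.startsWith("/") ? dir + fileName : root + dir + fileName;
        }
        // No experiment data dir configured: fall back to <root>/<processId>/<fileName>.
        return root + processId + "/" + fileName;
    }

    // e.g. resolveOutputPath("/data", "exp-42", "PROCESS_1", "out.dat") -> "/data/exp-42/out.dat"
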
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
index e21f200..a60a955 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
@@ -36,6 +36,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
 
     @Override
     public TaskResult onRun(TaskHelper taskHelper) {
+
         try {
 
             GroovyMapData mapData = new GroovyMapBuilder(getTaskContext()).build();
@@ -126,6 +127,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                     logger.info("Received job id " + jobId + " from compute resource");
                     jobModel.setJobId(jobId);
                     saveJobModel(jobModel);
+
                     JobStatus jobStatus = new JobStatus();
                     jobStatus.setJobState(JobState.SUBMITTED);
                     jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
@@ -139,6 +141,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                         jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                         jobModel.setJobStatuses(Arrays.asList(jobStatus));
                         saveJobStatus(jobModel);
+                        createMonitoringNode(jobId);
                     }
 
                     if (getComputeResourceDescription().isGatewayUsageReporting()){
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
index ac314e9..afa2630 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.task.submission.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.CommandOutput;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
+import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.helix.impl.task.AiravataTask;
@@ -27,9 +28,15 @@ import org.apache.airavata.model.messaging.event.MessageType;
 import org.apache.airavata.model.status.JobStatus;
 import org.apache.airavata.registry.cpi.*;
 import org.apache.commons.io.FileUtils;
+import org.apache.curator.RetryPolicy;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
 import org.apache.helix.HelixManager;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.ZooDefs;
 
 import java.io.File;
 import java.security.SecureRandom;
@@ -39,9 +46,34 @@ public abstract class JobSubmissionTask extends AiravataTask {
 
     private static final Logger logger = LogManager.getLogger(JobSubmissionTask.class);
 
+    private CuratorFramework curatorClient = null;
+
     @Override
     public void init(HelixManager manager, String workflowName, String jobName, String taskName) {
         super.init(manager, workflowName, jobName, taskName);
+        RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
+        try {
+            this.curatorClient = CuratorFrameworkFactory.newClient(ServerSettings.getZookeeperConnection(), retryPolicy);
+            this.curatorClient.start();
+        } catch (ApplicationSettingsException e) {
+            e.printStackTrace();
+            logger.error("Failed to create curator client ", e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    public CuratorFramework getCuratorClient() {
+        return curatorClient;
+    }
+
+    // TODO perform exception handling
+    protected void createMonitoringNode(String jobId) throws Exception {
+        logger.info("Creating zookeeper paths for job monitoring for job id : " + jobId + ", process : "
+                + getProcessId() + ", gateway : " + getGatewayId());
+        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/lock", new byte[0]);
+        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/gateway", getGatewayId().getBytes());
+        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/process", getProcessId().getBytes());
+        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/status", "pending".getBytes());
     }
 
     //////////////////////
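Note: createMonitoringNode() records four persistent znodes per job under /monitoring/<jobId> (lock, gateway, process and status, the latter initialised to "pending") so the post workflow manager can later map a parsed status email back to its process and gateway. A read-side sketch using the same Curator API, assuming a client started against the same ZooKeeper ensemble (the connection string below is an assumption; the real one comes from ServerSettings.getZookeeperConnection(), and getData().forPath() throws a checked Exception):

    // Sketch: reading back the monitoring znodes written above.
    CuratorFramework client = CuratorFrameworkFactory.newClient(
            "localhost:2181", new ExponentialBackoffRetry(1000, 3));
    client.start();

    String jobId = "1234567";   // hypothetical job id
    String gateway = new String(client.getData().forPath("/monitoring/" + jobId + "/gateway"));
    String process = new String(client.getData().forPath("/monitoring/" + jobId + "/process"));
    String status  = new String(client.getData().forPath("/monitoring/" + jobId + "/status"));
    // status starts as "pending"; PostWorkflowManager checks it for "cancelled" before acting.
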
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
new file mode 100644
index 0000000..25f8ec5
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
@@ -0,0 +1,256 @@
+package org.apache.airavata.helix.impl.workflow;
+
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.helix.core.OutPort;
+import org.apache.airavata.helix.impl.task.AiravataTask;
+import org.apache.airavata.helix.impl.task.EnvSetupTask;
+import org.apache.airavata.helix.impl.task.InputDataStagingTask;
+import org.apache.airavata.helix.impl.task.OutputDataStagingTask;
+import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;
+import org.apache.airavata.helix.impl.task.submission.task.JobSubmissionTask;
+import org.apache.airavata.helix.workflow.WorkflowManager;
+import org.apache.airavata.job.monitor.kafka.JobStatusResultDeserializer;
+import org.apache.airavata.job.monitor.parser.JobStatusResult;
+import org.apache.airavata.model.experiment.ExperimentModel;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.task.TaskModel;
+import org.apache.airavata.model.task.TaskTypes;
+import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
+import org.apache.airavata.registry.cpi.AppCatalog;
+import org.apache.airavata.registry.cpi.ExperimentCatalog;
+import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
+import org.apache.curator.RetryPolicy;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.LongDeserializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+import org.apache.zookeeper.data.Stat;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+public class PostWorkflowManager {
+
+    private static final Logger logger = LogManager.getLogger(PostWorkflowManager.class);
+
+    private final String BOOTSTRAP_SERVERS = "localhost:9092";
+    private final String TOPIC = "parsed-data";
+
+    private CuratorFramework curatorClient = null;
+
+    private void init() throws ApplicationSettingsException {
+        RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
+        this.curatorClient = CuratorFrameworkFactory.newClient(ServerSettings.getZookeeperConnection(), retryPolicy);
+        this.curatorClient.start();
+    }
+
+    private Consumer<String, JobStatusResult> createConsumer() {
+        final Properties props = new Properties();
+        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
+        props.put(ConsumerConfig.GROUP_ID_CONFIG, "MonitoringConsumer");
+        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JobStatusResultDeserializer.class.getName());
+        // Create the consumer using props.
+        final Consumer<String, JobStatusResult> consumer = new KafkaConsumer<String, JobStatusResult>(props);
+        // Subscribe to the topic.
+        consumer.subscribe(Collections.singletonList(TOPIC));
+        return consumer;
+    }
+
+    private String getProcessIdByJobId(String jobId) throws Exception {
+        byte[] processBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/process");
+        String process = new String(processBytes);
+        return process;
+    }
+
+    private String getGatewayByJobId(String jobId) throws Exception {
+        byte[] gatewayBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/gateway");
+        String gateway = new String(gatewayBytes);
+        return gateway;
+    }
+
+    private String getStatusByJobId(String jobId) throws Exception {
+        byte[] statusBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/status");
+        String status = new String(statusBytes);
+        return status;
+    }
+
+    private boolean hasMonitoringRegistered(String jobId) throws Exception {
+        Stat stat = this.curatorClient.checkExists().forPath("/monitoring/" + jobId);
+        return stat != null;
+    }
+
+    private void process(JobStatusResult jobStatusResult) {
+
+        if (jobStatusResult == null) {
+            return;
+        }
+
+        try {
+            logger.info("Processing job result " + jobStatusResult.getJobId());
+
+            if (hasMonitoringRegistered(jobStatusResult.getJobId())) {
+                String gateway = getGatewayByJobId(jobStatusResult.getJobId());
+                String processId = getProcessIdByJobId(jobStatusResult.getJobId());
+                String status = getStatusByJobId(jobStatusResult.getJobId());
+
+                // TODO get cluster lock before that
+                if ("cancelled".equals(status)) {
+
+                } else {
+
+                    if (jobStatusResult.getState() == JobState.COMPLETE) {
+                        logger.info("Job " + jobStatusResult.getJobId() + " was completed");
+
+                        ExperimentCatalog experimentCatalog = RegistryFactory.getExperimentCatalog(gateway);
+                        ProcessModel processModel = (ProcessModel) experimentCatalog.get(ExperimentCatalogModelType.PROCESS, processId);
+                        ExperimentModel experimentModel = (ExperimentModel) experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT, processModel.getExperimentId());
+                        String taskDag = processModel.getTaskDag();
+                        List<TaskModel> taskList = processModel.getTasks();
+
+                        String[] taskIds = taskDag.split(",");
+                        final List<AiravataTask> allTasks = new ArrayList<>();
+
+                        boolean jobSubmissionFound = false;
+
+                        for (String taskId : taskIds) {
+                            Optional<TaskModel> model = taskList.stream().filter(taskModel -> taskModel.getTaskId().equals(taskId)).findFirst();
+
+                            if (model.isPresent()) {
+                                TaskModel taskModel = model.get();
+                                AiravataTask airavataTask = null;
+                                if (taskModel.getTaskType() == TaskTypes.JOB_SUBMISSION) {
+                                    jobSubmissionFound = true;
+                                } else if (taskModel.getTaskType() == TaskTypes.DATA_STAGING) {
+                                    if (jobSubmissionFound) {
+                                        airavataTask = new OutputDataStagingTask();
+                                    }
+                                }
+
+                                if (airavataTask != null) {
+                                    airavataTask.setGatewayId(experimentModel.getGatewayId());
+                                    airavataTask.setExperimentId(experimentModel.getExperimentId());
+                                    airavataTask.setProcessId(processModel.getProcessId());
+                                    airavataTask.setTaskId(taskModel.getTaskId());
+                                    if (allTasks.size() > 0) {
+                                        allTasks.get(allTasks.size() - 1).setNextTask(new OutPort(airavataTask.getTaskId(), airavataTask));
+                                    }
+                                    allTasks.add(airavataTask);
+                                }
+                            }
+                        }
+                        WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster",
+                                "wm-23", ServerSettings.getZookeeperConnection());
+
+                        workflowManager.launchWorkflow(UUID.randomUUID().toString(),
+                                allTasks.stream().map(t -> (AiravataTask) t).collect(Collectors.toList()), true);
+
+                    } else if (jobStatusResult.getState() == JobState.CANCELED) {
+                        logger.info("Job " + jobStatusResult.getJobId() + " was externally cancelled");
+
+                    } else if (jobStatusResult.getState() == JobState.FAILED) {
+                        logger.info("Job " + jobStatusResult.getJobId() + " was failed");
+
+                    } else if (jobStatusResult.getState() == JobState.SUBMITTED) {
+                        logger.info("Job " + jobStatusResult.getJobId() + " was submitted");
+
+                    }
+                }
+            } else {
+                logger.warn("Could not find a monitoring register for job id " + jobStatusResult.getJobId());
+            }
+        } catch (Exception e) {
+            logger.error("Failed to process job : " + jobStatusResult.getJobId() + ", with status : " + jobStatusResult.getState().name(), e);
+        }
+    }
+
+    private void runConsumer() throws InterruptedException {
+        final Consumer<String, JobStatusResult> consumer = createConsumer();
+
+        final int giveUp = 100;   int noRecordsCount = 0;
+
+        while (true) {
+            final ConsumerRecords<String, JobStatusResult> consumerRecords = consumer.poll(1000);
+
+            /*if (consumerRecords.count() == 0) {
+                noRecordsCount++;
+                if (noRecordsCount > giveUp) break;
+                else continue;
+            }*/
+
+            consumerRecords.forEach(record -> {
+                process(record.value());
+            });
+
+            consumer.commitAsync();
+        }
+        //consumer.close();
+        //System.out.println("DONE");
+    }
+
+    public static void main(String[] args) throws Exception {
+
+        PostWorkflowManager postManager = new PostWorkflowManager();
+        postManager.init();
+        postManager.runConsumer();
+        /*
+        String processId = "PROCESS_5b252ad9-d630-4cf9-80e3-0c30c55d1001";
+        ExperimentCatalog experimentCatalog = RegistryFactory.getDefaultExpCatalog();
+
+        ProcessModel processModel = (ProcessModel) experimentCatalog.get(ExperimentCatalogModelType.PROCESS, processId);
+        ExperimentModel experimentModel = (ExperimentModel) experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT, processModel.getExperimentId());
+        String taskDag = processModel.getTaskDag();
+        List<TaskModel> taskList = processModel.getTasks();
+
+        String[] taskIds = taskDag.split(",");
+        final List<AiravataTask> allTasks = new ArrayList<>();
+
+        boolean jobSubmissionFound = false;
+
+        for (String taskId : taskIds) {
+            Optional<TaskModel> model = taskList.stream().filter(taskModel -> taskModel.getTaskId().equals(taskId)).findFirst();
+
+            if (model.isPresent()) {
+                TaskModel taskModel = model.get();
+                AiravataTask airavataTask = null;
+                if (taskModel.getTaskType() == TaskTypes.ENV_SETUP) {
+                    //airavataTask = new EnvSetupTask();
+                } else if (taskModel.getTaskType() == TaskTypes.JOB_SUBMISSION) {
+                    //airavataTask = new DefaultJobSubmissionTask();
+                    //airavataTask.setRetryCount(1);
+                    jobSubmissionFound = true;
+                } else if (taskModel.getTaskType() == TaskTypes.DATA_STAGING) {
+                    if (jobSubmissionFound) {
+                        airavataTask = new OutputDataStagingTask();
+                    } else {
+                        //airavataTask = new InputDataStagingTask();
+                    }
+                }
+
+                if (airavataTask != null) {
+                    airavataTask.setGatewayId(experimentModel.getGatewayId());
+                    airavataTask.setExperimentId(experimentModel.getExperimentId());
+                    airavataTask.setProcessId(processModel.getProcessId());
+                    airavataTask.setTaskId(taskModel.getTaskId());
+                    if (allTasks.size() > 0) {
+                        allTasks.get(allTasks.size() -1).setNextTask(new OutPort(airavataTask.getTaskId(), airavataTask));
+                    }
+                    allTasks.add(airavataTask);
+                }
+            }
+        }
+
+        WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster", "wm-22", "localhost:2199");
+        workflowManager.launchWorkflow(UUID.randomUUID().toString(), allTasks.stream().map(t -> (AiravataTask)t).collect(Collectors.toList()), true);
+        */
+    }
+}
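
Note: runConsumer() polls the "parsed-data" topic forever and calls commitAsync() only after each batch has been handed to process(), which gives roughly at-least-once semantics; a record whose processing fails is logged inside process() and then skipped, since its offset is still committed. A stoppable variant using the standard KafkaConsumer wakeup pattern is sketched below (an illustration only, not part of the commit; it assumes the extra imports java.util.concurrent.atomic.AtomicBoolean and org.apache.kafka.common.errors.WakeupException):

    // Sketch: a shutdown-aware version of the poll loop above.
    private final AtomicBoolean running = new AtomicBoolean(true);
    private Consumer<String, JobStatusResult> consumer;

    private void runConsumerStoppable() {
        consumer = createConsumer();
        try {
            while (running.get()) {
                ConsumerRecords<String, JobStatusResult> records = consumer.poll(1000);
                records.forEach(r -> process(r.value()));
                consumer.commitAsync();              // commit only after processing the batch
            }
        } catch (WakeupException e) {
            // expected when shutdown() wakes up a blocking poll(); nothing to do
        } finally {
            consumer.close();
        }
    }

    public void shutdown() {
        running.set(false);
        if (consumer != null) {
            consumer.wakeup();                       // unblocks a pending poll()
        }
    }
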
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
similarity index 99%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
index abd36e1..9814b01 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
@@ -23,7 +23,7 @@ import java.util.Optional;
 import java.util.UUID;
 import java.util.stream.Collectors;
 
-public class SimpleWorkflow {
+public class PreWorkflowManager {
 
     public static void main(String[] args) throws Exception {
 
diff --git a/modules/job-monitor/pom.xml b/modules/job-monitor/pom.xml
index c536a14..7a69882 100644
--- a/modules/job-monitor/pom.xml
+++ b/modules/job-monitor/pom.xml
@@ -33,6 +33,11 @@
             <artifactId>snakeyaml</artifactId>
             <version>1.15</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>1.0.0</version>
+        </dependency>
     </dependencies>
 
 </project>
\ No newline at end of file
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/EmailBasedMonitor.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/EmailBasedMonitor.java
index 7b13354..e41f500 100644
--- a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/EmailBasedMonitor.java
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/EmailBasedMonitor.java
@@ -2,6 +2,7 @@ package org.apache.airavata.job.monitor;
 
 import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.job.monitor.kafka.MessageProducer;
 import org.apache.airavata.job.monitor.parser.EmailParser;
 import org.apache.airavata.job.monitor.parser.JobStatusResult;
 import org.apache.airavata.job.monitor.parser.ResourceConfig;
@@ -48,6 +49,7 @@ public class EmailBasedMonitor implements Runnable {
     private Map<String, Boolean> canceledJobs = new ConcurrentHashMap<>();
     private Timer timer;
     private Map<ResourceJobManagerType, ResourceConfig> resourceConfigs = new HashMap<>();
+    private MessageProducer messageProducer = new MessageProducer();
 
 
     public EmailBasedMonitor() throws Exception {
@@ -235,8 +237,9 @@ public class EmailBasedMonitor implements Runnable {
             try {
                 JobStatusResult jobStatusResult = parse(message);
                 log.info(jobStatusResult.getJobId() + ", " + jobStatusResult.getJobName() + ", " + jobStatusResult.getState().getValue());
-                //processedMessages.add(message);
-                unreadMessages.add(message);
+                messageProducer.submitMessageToQueue(jobStatusResult);
+                processedMessages.add(message);
+                //unreadMessages.add(message);
             } catch (Exception e) {
                 unreadMessages.add(message);
             }
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/JobStatusResultDeserializer.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/JobStatusResultDeserializer.java
new file mode 100644
index 0000000..c3c7877
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/JobStatusResultDeserializer.java
@@ -0,0 +1,34 @@
+package org.apache.airavata.job.monitor.kafka;
+
+import org.apache.airavata.job.monitor.parser.JobStatusResult;
+import org.apache.airavata.model.status.JobState;
+import org.apache.kafka.common.serialization.Deserializer;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectInputStream;
+import java.util.Map;
+
+public class JobStatusResultDeserializer implements Deserializer<JobStatusResult> {
+    @Override
+    public void configure(Map<String, ?> map, boolean b) {
+
+    }
+
+    @Override
+    public JobStatusResult deserialize(String s, byte[] bytes) {
+        String deserializedData = new String(bytes);
+        String[] parts = deserializedData.split(",");
+        JobStatusResult jobStatusResult = new JobStatusResult();
+        jobStatusResult.setJobId(parts[0]);
+        jobStatusResult.setJobName(parts[1]);
+        jobStatusResult.setState(JobState.valueOf(parts[2]));
+        return jobStatusResult;
+    }
+
+    @Override
+    public void close() {
+
+    }
+}
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/JobStatusResultSerializer.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/JobStatusResultSerializer.java
new file mode 100644
index 0000000..a0dc6ec
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/JobStatusResultSerializer.java
@@ -0,0 +1,29 @@
+package org.apache.airavata.job.monitor.kafka;
+
+import org.apache.airavata.job.monitor.parser.JobStatusResult;
+import org.apache.kafka.common.serialization.Serializer;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectOutput;
+import java.io.ObjectOutputStream;
+import java.util.Map;
+
+public class JobStatusResultSerializer implements Serializer<JobStatusResult> {
+
+    @Override
+    public void configure(Map<String, ?> map, boolean b) {
+
+    }
+
+    @Override
+    public byte[] serialize(String s, JobStatusResult jobStatusResult) {
+        String serializedData = jobStatusResult.getJobId() + "," + jobStatusResult.getJobName() + "," + jobStatusResult.getState().name();
+        return serializedData.getBytes();
+    }
+
+    @Override
+    public void close() {
+
+    }
+}
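
Note: the serializer/deserializer pair above encodes a JobStatusResult as the comma-separated string "<jobId>,<jobName>,<STATE>", so it only round-trips cleanly while neither field contains a comma. A quick round-trip sketch, assuming JobStatusResult exposes the getters and setters used in the two classes above:

    // Round-trip check for the CSV encoding (the topic argument is ignored by both methods).
    JobStatusResult original = new JobStatusResult();
    original.setJobId("1234567");
    original.setJobName("test-job");
    original.setState(JobState.COMPLETE);

    byte[] wire = new JobStatusResultSerializer().serialize("parsed-data", original);
    JobStatusResult copy = new JobStatusResultDeserializer().deserialize("parsed-data", wire);

    assert copy.getJobId().equals("1234567");
    assert copy.getState() == JobState.COMPLETE;
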
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/MessageProducer.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/MessageProducer.java
new file mode 100644
index 0000000..748a533
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/MessageProducer.java
@@ -0,0 +1,36 @@
+package org.apache.airavata.job.monitor.kafka;
+
+import org.apache.airavata.job.monitor.parser.JobStatusResult;
+import org.apache.kafka.clients.producer.*;
+import org.apache.kafka.common.serialization.StringSerializer;
+
+import java.util.Properties;
+import java.util.concurrent.ExecutionException;
+
+public class MessageProducer {
+    private final static String TOPIC = "parsed-data";
+    private final static String BOOTSTRAP_SERVERS = "localhost:9092";
+
+    final Producer<String, JobStatusResult> producer;
+
+    public MessageProducer() {
+        producer = createProducer();
+    }
+
+    private Producer<String, JobStatusResult> createProducer() {
+        Properties props = new Properties();
+        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
+                BOOTSTRAP_SERVERS);
+        props.put(ProducerConfig.CLIENT_ID_CONFIG, "KafkaExampleProducer");
+        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
+                StringSerializer.class.getName());
+        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+                JobStatusResultSerializer.class.getName());
+        return new KafkaProducer<String, JobStatusResult>(props);
+    }
+
+    public void submitMessageToQueue(JobStatusResult jobStatusResult) throws ExecutionException, InterruptedException {
+        final ProducerRecord<String, JobStatusResult> record = new ProducerRecord<>(TOPIC, jobStatusResult);
+        RecordMetadata recordMetadata = producer.send(record).get();
+    }
+}
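
Note: submitMessageToQueue() blocks on producer.send(record).get() until the broker acknowledges the record, so every parsed status email costs the monitor thread a broker round trip. The usual non-blocking alternative is an asynchronous send with a callback, sketched below (an alternative only, not what this commit does):

    // Alternative sketch: fire-and-forget send with a logging callback.
    public void submitMessageToQueueAsync(JobStatusResult jobStatusResult) {
        final ProducerRecord<String, JobStatusResult> record = new ProducerRecord<>(TOPIC, jobStatusResult);
        producer.send(record, (metadata, exception) -> {
            if (exception != null) {
                // the caller is never told about the failure, so make it visible in the logs
                System.err.println("Failed to publish status for job " + jobStatusResult.getJobId()
                        + ": " + exception.getMessage());
            }
        });
    }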

-- 
To stop receiving notification emails like this one, please contact
dimuthuupe@apache.org.

[airavata] 06/17: Fixing bugs in pre workflow

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 573dbab1a29f1bf2f1fdb8c9cacdb7ad42b105ad
Author: dimuthu <di...@gmail.com>
AuthorDate: Fri Mar 2 13:16:49 2018 -0500

    Fixing bugs in pre workflow
---
 .../airavata/helix/agent/ssh/SshAgentAdaptor.java  |  4 +-
 .../apache/airavata/helix/core/AbstractTask.java   | 10 ++++
 .../airavata/helix/workflow/WorkflowManager.java   |  2 +-
 .../airavata/helix/impl/task/EnvSetupTask.java     |  2 +-
 .../airavata/helix/impl/task/TaskContext.java      | 68 +++++++++++++++++++++-
 .../impl/task/submission/GroovyMapBuilder.java     |  4 +-
 .../submission/task/DefaultJobSubmissionTask.java  |  6 +-
 .../task/submission/task/JobSubmissionTask.java    |  4 +-
 .../helix/impl/workflow/SimpleWorkflow.java        |  5 +-
 .../src/main/resources/application.properties      |  2 +-
 10 files changed, 92 insertions(+), 15 deletions(-)

diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
index 2ad2415..5392ab5 100644
--- a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
@@ -132,13 +132,12 @@ public class SshAgentAdaptor implements AgentAdaptor {
         ChannelExec channelExec = null;
         try {
             channelExec = ((ChannelExec) session.openChannel("exec"));
-            channelExec.setCommand(command);
+            channelExec.setCommand("cd " + workingDirectory + "; " + command);
             channelExec.setInputStream(null);
             InputStream out = channelExec.getInputStream();
             InputStream err = channelExec.getErrStream();
             channelExec.connect();
 
-            commandOutput.setExitCode(channelExec.getExitStatus());
             commandOutput.readStdOutFromStream(out);
             commandOutput.readStdErrFromStream(err);
             return commandOutput;
@@ -150,6 +149,7 @@ public class SshAgentAdaptor implements AgentAdaptor {
             throw new AgentException(e);
         } finally {
             if (channelExec != null) {
+                commandOutput.setExitCode(channelExec.getExitStatus());
                 channelExec.disconnect();
             }
         }
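
A note on the exit-code change in this hunk: in JSch, ChannelExec.getExitStatus() returns -1 until the remote command has terminated and the channel has closed, so the removed line (called straight after connect()) could only ever record -1. Reading it in the finally block is an improvement, but the value is only guaranteed once the channel reports closed. A hedged sketch of the usual wait loop, written as a hypothetical helper and not part of the commit:

    // Sketch of a helper that could be called before disconnect(); JSch's
    // getExitStatus() stays at -1 until the channel has closed.
    private static int waitForExitStatus(com.jcraft.jsch.ChannelExec channelExec) throws InterruptedException {
        while (!channelExec.isClosed()) {
            Thread.sleep(100);   // poll until the remote command terminates
        }
        return channelExec.getExitStatus();
    }
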
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/AbstractTask.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/AbstractTask.java
index 04fa37f..5aca9cd 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/AbstractTask.java
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/AbstractTask.java
@@ -26,6 +26,8 @@ public abstract class AbstractTask extends UserContentStore implements Task {
     private TaskCallbackContext callbackContext;
     private TaskHelper taskHelper;
 
+    private int retryCount = 3;
+
     @Override
     public void init(HelixManager manager, String workflowName, String jobName, String taskName) {
         super.init(manager, workflowName, jobName, taskName);
@@ -105,4 +107,12 @@ public abstract class AbstractTask extends UserContentStore implements Task {
         this.taskHelper = taskHelper;
         return this;
     }
+
+    public int getRetryCount() {
+        return retryCount;
+    }
+
+    public void setRetryCount(int retryCount) {
+        this.retryCount = retryCount;
+    }
 }
diff --git a/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java b/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
index ab7e3c4..9ecafb9 100644
--- a/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
+++ b/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
@@ -61,7 +61,7 @@ public class WorkflowManager {
             JobConfig.Builder job = new JobConfig.Builder()
                     .addTaskConfigs(taskBuilds)
                     .setFailureThreshold(0)
-                    .setMaxAttemptsPerTask(3);
+                    .setMaxAttemptsPerTask(data.getRetryCount());
 
             if (!globalParticipant) {
                 job.setInstanceGroupTag(taskType);
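
Read together with the AbstractTask change above, this makes the Helix attempt limit per task rather than a hard-coded 3: every task carries a retryCount (defaulting to 3), and WorkflowManager hands it to setMaxAttemptsPerTask via data.getRetryCount(). A minimal usage sketch, mirroring what SimpleWorkflow does later in this mail; the surrounding workflow setup is elided:

    import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;

    public class RetryCountSketch {
        public static void main(String[] args) {
            // Give the job submission task a single attempt; tasks that never call
            // setRetryCount keep AbstractTask's default of 3, which WorkflowManager
            // turns into JobConfig.Builder.setMaxAttemptsPerTask(...).
            DefaultJobSubmissionTask submissionTask = new DefaultJobSubmissionTask();
            submissionTask.setRetryCount(1);
        }
    }
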
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
index eafa53d..ddba5f2 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
@@ -28,7 +28,7 @@ public class EnvSetupTask extends AiravataTask {
             logger.info("Creating directory " + getTaskContext().getWorkingDir() + " on compute resource " + getTaskContext().getComputeResourceId());
             adaptor.createDirectory(getTaskContext().getWorkingDir());
             publishTaskState(TaskState.COMPLETED);
-            return onSuccess("Successfully completed");
+            return onSuccess("Envi setup task successfully completed " + getTaskId());
         } catch (Exception e) {
             try {
                 publishTaskState(TaskState.FAILED);
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
index 64a7de8..489a196 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
@@ -13,6 +13,8 @@ import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescr
 import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
 import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
 import org.apache.airavata.model.appcatalog.userresourceprofile.UserStoragePreference;
+import org.apache.airavata.model.application.io.DataType;
+import org.apache.airavata.model.application.io.OutputDataObjectType;
 import org.apache.airavata.model.data.movement.DataMovementProtocol;
 import org.apache.airavata.model.job.JobModel;
 import org.apache.airavata.model.process.ProcessModel;
@@ -23,11 +25,13 @@ import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.AppCatalogException;
 import org.apache.airavata.registry.cpi.ExperimentCatalog;
+import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
 import java.util.*;
 
 public class TaskContext {
@@ -436,8 +440,38 @@ public class TaskContext {
         this.resourceJobManager = resourceJobManager;
     }
 
-    public ResourceJobManager getResourceJobManager() {
-        return resourceJobManager;
+    public ResourceJobManager getResourceJobManager() throws Exception {
+
+        if (this.resourceJobManager == null) {
+            JobSubmissionInterface jsInterface = getPreferredJobSubmissionInterface();
+
+            if (jsInterface == null) {
+                throw new Exception("Job Submission interface cannot be empty at this point");
+            } else if (jsInterface.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
+                SSHJobSubmission sshJobSubmission = getAppCatalog().getComputeResource().getSSHJobSubmission
+                        (jsInterface.getJobSubmissionInterfaceId());
+                // context method.
+                resourceJobManager = sshJobSubmission.getResourceJobManager();
+            } else if (jsInterface.getJobSubmissionProtocol() == JobSubmissionProtocol.LOCAL) {
+                LOCALSubmission localSubmission = getAppCatalog().getComputeResource().getLocalJobSubmission
+                        (jsInterface.getJobSubmissionInterfaceId());
+                resourceJobManager = localSubmission.getResourceJobManager();
+            } else if (jsInterface.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH_FORK) {
+                SSHJobSubmission sshJobSubmission = getAppCatalog().getComputeResource().getSSHJobSubmission
+                        (jsInterface.getJobSubmissionInterfaceId());
+                resourceJobManager = sshJobSubmission.getResourceJobManager();
+            } else if (jsInterface.getJobSubmissionProtocol() == JobSubmissionProtocol.CLOUD) {
+                return null;
+            } else {
+                throw new Exception("Unsupported JobSubmissionProtocol - " + jsInterface.getJobSubmissionProtocol()
+                        .name());
+            }
+
+            if (resourceJobManager == null) {
+                throw new Exception("Resource Job Manager is empty.");
+            }
+        }
+        return this.resourceJobManager;
     }
 
     public String getLocalWorkingDir() {
@@ -794,6 +828,36 @@ public class TaskContext {
                     .getApplicationInterface(processModel.getApplicationInterfaceId()));
             ctx.setComputeResourceDescription(appCatalog.getComputeResource().getComputeResource
                     (ctx.getComputeResourceId()));
+
+            List<OutputDataObjectType> applicationOutputs = ctx.getApplicationInterfaceDescription().getApplicationOutputs();
+            if (applicationOutputs != null && !applicationOutputs.isEmpty()) {
+                for (OutputDataObjectType outputDataObjectType : applicationOutputs) {
+                    if (outputDataObjectType.getType().equals(DataType.STDOUT)) {
+                        if (outputDataObjectType.getValue() == null || outputDataObjectType.getValue().equals("")) {
+                            String stdOut = (ctx.getWorkingDir().endsWith(File.separator) ? ctx.getWorkingDir() : ctx.getWorkingDir() + File.separator)
+                                    + ctx.getApplicationInterfaceDescription().getApplicationName() + ".stdout";
+                            outputDataObjectType.setValue(stdOut);
+                            ctx.setStdoutLocation(stdOut);
+                        } else {
+                            ctx.setStdoutLocation(outputDataObjectType.getValue());
+                        }
+                    }
+                    if (outputDataObjectType.getType().equals(DataType.STDERR)) {
+                        if (outputDataObjectType.getValue() == null || outputDataObjectType.getValue().equals("")) {
+                            String stderrLocation = (ctx.getWorkingDir().endsWith(File.separator) ? ctx.getWorkingDir() : ctx.getWorkingDir() + File.separator)
+                                    + ctx.getApplicationInterfaceDescription().getApplicationName() + ".stderr";
+                            outputDataObjectType.setValue(stderrLocation);
+                            ctx.setStderrLocation(stderrLocation);
+                        } else {
+                            ctx.setStderrLocation(outputDataObjectType.getValue());
+                        }
+                    }
+                }
+            }
+
+            // TODO: move this somewhere else, as this is not the correct place to do it
+            processModel.setProcessOutputs(applicationOutputs);
+            experimentCatalog.update(ExperimentCatalogModelType.PROCESS, processModel, processId);
             return ctx;
         }
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
index e4267ce..2119755 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
@@ -64,7 +64,7 @@ public class GroovyMapBuilder {
         mapData.setInputs(inputValues);
 
         List<String> inputValuesAll = getProcessInputValues(taskContext.getProcessModel().getProcessInputs(), false);
-        inputValues.addAll(getProcessOutputValues(taskContext.getProcessModel().getProcessOutputs(), false));
+        inputValuesAll.addAll(getProcessOutputValues(taskContext.getProcessModel().getProcessOutputs(), false));
         mapData.setInputsAll(inputValuesAll);
 
         mapData.setUserName(taskContext.getComputeResourceLoginUserName());
@@ -103,7 +103,7 @@ public class GroovyMapBuilder {
                 mapData.setQueueName(scheduling.getQueueName());
             }
             if (totalNodeCount > 0) {
-                mapData.setNodes(totalCPUCount);
+                mapData.setNodes(totalNodeCount);
             }
             if (totalCPUCount > 0) {
                 int ppn = totalCPUCount / totalNodeCount;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
index c85e18b..e21f200 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
@@ -46,6 +46,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
             jobModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
             jobModel.setTaskId(getTaskId());
             jobModel.setJobName(mapData.getJobName());
+            jobModel.setJobDescription("Sample description");
 
             if (mapData != null) {
                 //jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
@@ -71,10 +72,11 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                         statusList.add(new JobStatus(JobState.FAILED));
                         statusList.get(0).setReason(submissionOutput.getFailureReason());
                         jobModel.setJobStatuses(statusList);
-                        jobModel.setJobDescription("Sample description");
                         saveJobModel(jobModel);
                         logger.error("expId: " + getExperimentId() + ", processid: " + getProcessId()+ ", taskId: " +
-                                getTaskId() + " :- Job submission failed for job name " + jobModel.getJobName());
+                                getTaskId() + " :- Job submission failed for job name " + jobModel.getJobName()
+                                + ". Exit code : " + submissionOutput.getExitCode() + ", Submission failed : "
+                                + submissionOutput.isJobSubmissionFailed());
 
                         ErrorModel errorModel = new ErrorModel();
                         errorModel.setUserFriendlyMessage(submissionOutput.getFailureReason());
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
index b517af1..ac314e9 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
@@ -181,9 +181,9 @@ public abstract class JobSubmissionTask extends AiravataTask {
             MessageContext msgCtx = new MessageContext(jobStatusChangeEvent, MessageType.JOB, AiravataUtils.getId
                     (MessageType.JOB.name()), getGatewayId());
             msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
-            getStatusPublisher().publish(msgCtx);
+            //getStatusPublisher().publish(msgCtx);
         } catch (Exception e) {
-            throw new Exception("Error persisting job status" + e.getLocalizedMessage(), e);
+            throw new Exception("Error persisting job status " + e.getLocalizedMessage(), e);
         }
     }
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
index 63921db..abd36e1 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
@@ -27,7 +27,7 @@ public class SimpleWorkflow {
 
     public static void main(String[] args) throws Exception {
 
-        String processId = "PROCESS_438a87cc-2dec-4edc-bfeb-31128df91bb6";
+        String processId = "PROCESS_5b252ad9-d630-4cf9-80e3-0c30c55d1001";
         AppCatalog appCatalog = RegistryFactory.getAppCatalog();
         ExperimentCatalog experimentCatalog = RegistryFactory.getDefaultExpCatalog();
 
@@ -51,10 +51,11 @@ public class SimpleWorkflow {
                     airavataTask = new EnvSetupTask();
                 } else if (taskModel.getTaskType() == TaskTypes.JOB_SUBMISSION) {
                     airavataTask = new DefaultJobSubmissionTask();
+                    airavataTask.setRetryCount(1);
                     jobSubmissionFound = true;
                 } else if (taskModel.getTaskType() == TaskTypes.DATA_STAGING) {
                     if (jobSubmissionFound) {
-                        airavataTask = new OutputDataStagingTask();
+                        //airavataTask = new OutputDataStagingTask();
                     } else {
                         airavataTask = new InputDataStagingTask();
                     }
diff --git a/modules/helix-spectator/src/main/resources/application.properties b/modules/helix-spectator/src/main/resources/application.properties
index a9b0969..b4b8048 100644
--- a/modules/helix-spectator/src/main/resources/application.properties
+++ b/modules/helix-spectator/src/main/resources/application.properties
@@ -1,3 +1,3 @@
 zookeeper.connection.url=localhost:2199
 helix.cluster.name=AiravataDemoCluster
-participant.name=all-p2
\ No newline at end of file
+participant.name=all-p3
\ No newline at end of file

-- 
To stop receiving notification emails like this one, please contact
dimuthuupe@apache.org.

[airavata] 13/17: Refactoring

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 782b0e81242604961dba675fc02386698cce48b9
Author: dimuthu <di...@gmail.com>
AuthorDate: Mon Mar 5 23:18:03 2018 -0500

    Refactoring
---
 .../airavata/helix/impl/participant/GlobalParticipant.java | 14 +++++++-------
 .../helix/impl/task/{ => completing}/CompletingTask.java   |  4 +++-
 .../airavata/helix/impl/task/{ => env}/EnvSetupTask.java   |  4 +++-
 .../helix/impl/task/{ => staging}/DataStagingTask.java     |  4 +++-
 .../impl/task/{ => staging}/InputDataStagingTask.java      |  4 +++-
 .../impl/task/{ => staging}/OutputDataStagingTask.java     |  4 +++-
 .../submission/{task => }/DefaultJobSubmissionTask.java    |  6 +++---
 .../task/submission/{task => }/ForkJobSubmissionTask.java  |  9 +++------
 .../impl/task/submission/{task => }/JobSubmissionTask.java | 14 ++------------
 .../task/submission/{task => }/LocalJobSubmissionTask.java |  9 ++++-----
 .../task/submission/{ => config}/GroovyMapBuilder.java     |  2 +-
 .../impl/task/submission/{ => config}/GroovyMapData.java   |  2 +-
 .../helix/impl/task/submission/config/JobFactory.java      |  6 ++----
 .../helix/impl/task/submission/{ => config}/Script.java    |  2 +-
 .../helix/impl/task/submission/{ => config}/ScriptTag.java |  2 +-
 .../impl/task/submission/{ => config}/SubmissionUtil.java  |  2 +-
 .../config/{imp => app}/ForkJobConfiguration.java          |  2 +-
 .../impl/task/submission/config/{imp => app}/JobUtil.java  |  2 +-
 .../config/{imp => app}/LSFJobConfiguration.java           |  2 +-
 .../config/{imp => app}/PBSJobConfiguration.java           |  2 +-
 .../config/{imp => app}/SlurmJobConfiguration.java         |  2 +-
 .../config/{imp => app}/UGEJobConfiguration.java           |  2 +-
 .../parser/AiravataCustomCommandOutputParser.java          |  2 +-
 .../config/{imp => app}/parser/ForkOutputParser.java       |  2 +-
 .../config/{imp => app}/parser/LSFOutputParser.java        |  2 +-
 .../config/{imp => app}/parser/PBSOutputParser.java        |  4 ++--
 .../config/{imp => app}/parser/SlurmOutputParser.java      |  4 ++--
 .../config/{imp => app}/parser/UGEOutputParser.java        |  3 +--
 .../airavata/helix/impl/workflow/PostWorkflowManager.java  |  6 ++----
 .../airavata/helix/impl/workflow/PreWorkflowManager.java   |  7 +++----
 .../helix-spectator/src/main/resources/log4j.properties    |  4 ++--
 31 files changed, 62 insertions(+), 72 deletions(-)

diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
index fc3fbcb..4849934 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
@@ -15,13 +15,13 @@ import java.util.Map;
 public class GlobalParticipant extends HelixParticipant {
 
     private String[] taskClasses = {
-        "org.apache.airavata.helix.impl.task.EnvSetupTask",
-        "org.apache.airavata.helix.impl.task.InputDataStagingTask",
-        "org.apache.airavata.helix.impl.task.OutputDataStagingTask",
-        "org.apache.airavata.helix.impl.task.CompletingTask",
-        "org.apache.airavata.helix.impl.task.submission.task.ForkJobSubmissionTask",
-        "org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask",
-        "org.apache.airavata.helix.impl.task.submission.task.LocalJobSubmissionTask"
+        "org.apache.airavata.helix.impl.task.env.EnvSetupTask",
+        "org.apache.airavata.helix.impl.task.staging.InputDataStagingTask",
+        "org.apache.airavata.helix.impl.task.staging.OutputDataStagingTask",
+        "org.apache.airavata.helix.impl.task.completing.CompletingTask",
+        "org.apache.airavata.helix.impl.task.submission.ForkJobSubmissionTask",
+        "org.apache.airavata.helix.impl.task.submission.DefaultJobSubmissionTask",
+        "org.apache.airavata.helix.impl.task.submission.LocalJobSubmissionTask"
     };
 
     public Map<String, TaskFactory> getTaskFactory() {
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/completing/CompletingTask.java
similarity index 85%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/completing/CompletingTask.java
index d036258..f32d019 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/completing/CompletingTask.java
@@ -1,5 +1,7 @@
-package org.apache.airavata.helix.impl.task;
+package org.apache.airavata.helix.impl.task.completing;
 
+import org.apache.airavata.helix.impl.task.AiravataTask;
+import org.apache.airavata.helix.impl.task.TaskContext;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.status.ProcessState;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
similarity index 92%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
index 0ad5698..6eb1722 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
@@ -1,6 +1,8 @@
-package org.apache.airavata.helix.impl.task;
+package org.apache.airavata.helix.impl.task.env;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.helix.impl.task.AiravataTask;
+import org.apache.airavata.helix.impl.task.TaskContext;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.status.ProcessState;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
similarity index 96%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
index 594cbc9..76b4cb3 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
@@ -1,9 +1,11 @@
-package org.apache.airavata.helix.impl.task;
+package org.apache.airavata.helix.impl.task.staging;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.AgentException;
 import org.apache.airavata.agents.api.StorageResourceAdaptor;
 import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.helix.impl.task.AiravataTask;
+import org.apache.airavata.helix.impl.task.TaskOnFailException;
 import org.apache.airavata.helix.task.api.support.AdaptorSupport;
 import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
 import org.apache.airavata.model.task.DataStagingTaskModel;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
similarity index 96%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
index 2c885f4..de2aeac 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
@@ -1,8 +1,10 @@
-package org.apache.airavata.helix.impl.task;
+package org.apache.airavata.helix.impl.task.staging;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.AgentException;
 import org.apache.airavata.agents.api.StorageResourceAdaptor;
+import org.apache.airavata.helix.impl.task.TaskContext;
+import org.apache.airavata.helix.impl.task.TaskOnFailException;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
similarity index 98%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
index 738d955..7d657cb 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
@@ -1,8 +1,10 @@
-package org.apache.airavata.helix.impl.task;
+package org.apache.airavata.helix.impl.task.staging;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.AgentException;
 import org.apache.airavata.agents.api.StorageResourceAdaptor;
+import org.apache.airavata.helix.impl.task.TaskContext;
+import org.apache.airavata.helix.impl.task.TaskOnFailException;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
similarity index 98%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
index 9b015bb..82316f0 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
@@ -1,11 +1,11 @@
-package org.apache.airavata.helix.impl.task.submission.task;
+package org.apache.airavata.helix.impl.task.submission;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.helix.impl.task.TaskContext;
-import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
-import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
+import org.apache.airavata.helix.impl.task.submission.config.GroovyMapBuilder;
+import org.apache.airavata.helix.impl.task.submission.config.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
similarity index 90%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
index afce74e..06ce0ea 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
@@ -1,21 +1,18 @@
-package org.apache.airavata.helix.impl.task.submission.task;
+package org.apache.airavata.helix.impl.task.submission;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.helix.impl.task.TaskContext;
-import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
-import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
-import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
+import org.apache.airavata.helix.impl.task.submission.config.GroovyMapBuilder;
+import org.apache.airavata.helix.impl.task.submission.config.GroovyMapData;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.job.JobModel;
 import org.apache.airavata.model.status.JobState;
 import org.apache.airavata.model.status.JobStatus;
-import org.apache.commons.io.FileUtils;
 import org.apache.helix.task.TaskResult;
 
-import java.io.File;
 import java.util.Arrays;
 
 @TaskDef(name = "Fork Job Submission")
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
similarity index 91%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
index 4fed22d..7bf5034 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
@@ -1,4 +1,4 @@
-package org.apache.airavata.helix.impl.task.submission.task;
+package org.apache.airavata.helix.impl.task.submission;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.CommandOutput;
@@ -7,20 +7,11 @@ import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.helix.impl.task.AiravataTask;
-import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
+import org.apache.airavata.helix.impl.task.submission.config.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.config.JobFactory;
 import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
 import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
 import org.apache.airavata.messaging.core.MessageContext;
-import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
-import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
-import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
-import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
-import org.apache.airavata.model.commons.ErrorModel;
 import org.apache.airavata.model.job.JobModel;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
@@ -36,7 +27,6 @@ import org.apache.helix.HelixManager;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.ZooDefs;
 
 import java.io.File;
 import java.security.SecureRandom;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/LocalJobSubmissionTask.java
similarity index 90%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/LocalJobSubmissionTask.java
index 3e51b4f..db582c3 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/LocalJobSubmissionTask.java
@@ -1,13 +1,12 @@
-package org.apache.airavata.helix.impl.task.submission.task;
+package org.apache.airavata.helix.impl.task.submission;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.helix.impl.task.TaskContext;
-import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
-import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
-import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
-import org.apache.airavata.helix.impl.task.submission.task.JobSubmissionTask;
+import org.apache.airavata.helix.impl.task.submission.config.GroovyMapBuilder;
+import org.apache.airavata.helix.impl.task.submission.config.GroovyMapData;
+import org.apache.airavata.helix.impl.task.submission.config.SubmissionUtil;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.job.JobModel;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapBuilder.java
similarity index 99%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapBuilder.java
index 2119755..8b226ed 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapBuilder.java
@@ -1,4 +1,4 @@
-package org.apache.airavata.helix.impl.task.submission;
+package org.apache.airavata.helix.impl.task.submission.config;
 
 import groovy.text.GStringTemplateEngine;
 import groovy.text.TemplateEngine;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
similarity index 99%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
index 6ebde21..5414a46 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
@@ -1,4 +1,4 @@
-package org.apache.airavata.helix.impl.task.submission;
+package org.apache.airavata.helix.impl.task.submission.config;
 
 import groovy.lang.Writable;
 import groovy.text.GStringTemplateEngine;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
index b04ffd8..4ac0f80 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
@@ -1,12 +1,10 @@
 package org.apache.airavata.helix.impl.task.submission.config;
 
-import org.apache.airavata.helix.impl.task.submission.config.imp.*;
-import org.apache.airavata.helix.impl.task.submission.config.imp.parser.*;
+import org.apache.airavata.helix.impl.task.submission.config.app.*;
+import org.apache.airavata.helix.impl.task.submission.config.app.parser.*;
 import org.apache.airavata.model.appcatalog.computeresource.*;
-import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.apache.airavata.registry.cpi.RegistryException;
 
 public class JobFactory {
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/Script.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/Script.java
similarity index 95%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/Script.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/Script.java
index 208e9e5..d68fa1b 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/Script.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/Script.java
@@ -1,4 +1,4 @@
-package org.apache.airavata.helix.impl.task.submission;
+package org.apache.airavata.helix.impl.task.submission.config;
 
 public enum Script {
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ScriptTag.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java
similarity index 83%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ScriptTag.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java
index c03c11f..44f6e22 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ScriptTag.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java
@@ -1,4 +1,4 @@
-package org.apache.airavata.helix.impl.task.submission;
+package org.apache.airavata.helix.impl.task.submission.config;
 
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/SubmissionUtil.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java
similarity index 69%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/SubmissionUtil.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java
index e2cbfee..7846ac8 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/SubmissionUtil.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java
@@ -1,4 +1,4 @@
-package org.apache.airavata.helix.impl.task.submission;
+package org.apache.airavata.helix.impl.task.submission.config;
 
 import java.io.File;
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/ForkJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/ForkJobConfiguration.java
similarity index 98%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/ForkJobConfiguration.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/ForkJobConfiguration.java
index d25f17f..22237bd 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/ForkJobConfiguration.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/ForkJobConfiguration.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp;
+package org.apache.airavata.helix.impl.task.submission.config.app;
 
 import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/JobUtil.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/JobUtil.java
similarity index 97%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/JobUtil.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/JobUtil.java
index 36bce60..655e338 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/JobUtil.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/JobUtil.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp;
+package org.apache.airavata.helix.impl.task.submission.config.app;
 
 import org.apache.airavata.model.status.JobState;
 import org.slf4j.Logger;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/LSFJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/LSFJobConfiguration.java
similarity index 98%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/LSFJobConfiguration.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/LSFJobConfiguration.java
index bccd7ee..bec46a3 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/LSFJobConfiguration.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/LSFJobConfiguration.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp;
+package org.apache.airavata.helix.impl.task.submission.config.app;
 
 import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/PBSJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/PBSJobConfiguration.java
similarity index 98%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/PBSJobConfiguration.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/PBSJobConfiguration.java
index aeedeb9..52c132e 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/PBSJobConfiguration.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/PBSJobConfiguration.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp;
+package org.apache.airavata.helix.impl.task.submission.config.app;
 
 import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/SlurmJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/SlurmJobConfiguration.java
similarity index 98%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/SlurmJobConfiguration.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/SlurmJobConfiguration.java
index fc431ce..42f5cfe 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/SlurmJobConfiguration.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/SlurmJobConfiguration.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp;
+package org.apache.airavata.helix.impl.task.submission.config.app;
 
 import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/UGEJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/UGEJobConfiguration.java
similarity index 98%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/UGEJobConfiguration.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/UGEJobConfiguration.java
index 6a12966..2befbf9 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/UGEJobConfiguration.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/UGEJobConfiguration.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp;
+package org.apache.airavata.helix.impl.task.submission.config.app;
 
 import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/AiravataCustomCommandOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/AiravataCustomCommandOutputParser.java
similarity index 96%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/AiravataCustomCommandOutputParser.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/AiravataCustomCommandOutputParser.java
index c3a5a2e..d622b3e 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/AiravataCustomCommandOutputParser.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/AiravataCustomCommandOutputParser.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+package org.apache.airavata.helix.impl.task.submission.config.app.parser;
 
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
 import org.apache.airavata.model.status.JobStatus;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/ForkOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/ForkOutputParser.java
similarity index 96%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/ForkOutputParser.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/ForkOutputParser.java
index a4f48cc..2b67cff 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/ForkOutputParser.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/ForkOutputParser.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+package org.apache.airavata.helix.impl.task.submission.config.app.parser;
 
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/LSFOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/LSFOutputParser.java
similarity index 98%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/LSFOutputParser.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/LSFOutputParser.java
index 0bf812f..7f3638b 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/LSFOutputParser.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/LSFOutputParser.java
@@ -17,7 +17,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+package org.apache.airavata.helix.impl.task.submission.config.app.parser;
 
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
 import org.apache.airavata.model.status.JobState;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/PBSOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/PBSOutputParser.java
similarity index 97%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/PBSOutputParser.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/PBSOutputParser.java
index 3be8c8a..d82673b 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/PBSOutputParser.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/PBSOutputParser.java
@@ -17,10 +17,10 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+package org.apache.airavata.helix.impl.task.submission.config.app.parser;
 
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
-import org.apache.airavata.helix.impl.task.submission.config.imp.JobUtil;
+import org.apache.airavata.helix.impl.task.submission.config.app.JobUtil;
 import org.apache.airavata.model.status.JobStatus;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/SlurmOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/SlurmOutputParser.java
similarity index 97%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/SlurmOutputParser.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/SlurmOutputParser.java
index 3ebbcfd..459d4f7 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/SlurmOutputParser.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/SlurmOutputParser.java
@@ -17,10 +17,10 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+package org.apache.airavata.helix.impl.task.submission.config.app.parser;
 
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
-import org.apache.airavata.helix.impl.task.submission.config.imp.JobUtil;
+import org.apache.airavata.helix.impl.task.submission.config.app.JobUtil;
 import org.apache.airavata.model.status.JobState;
 import org.apache.airavata.model.status.JobStatus;
 import org.slf4j.Logger;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/UGEOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/UGEOutputParser.java
similarity index 96%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/UGEOutputParser.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/UGEOutputParser.java
index 0f457ff..b454ea5 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/UGEOutputParser.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/UGEOutputParser.java
@@ -17,10 +17,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+package org.apache.airavata.helix.impl.task.submission.config.app.parser;
 
 import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
-import org.apache.airavata.helix.impl.task.submission.config.imp.parser.PBSOutputParser;
 import org.apache.airavata.model.status.JobState;
 import org.apache.airavata.model.status.JobStatus;
 import org.slf4j.Logger;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
index b4ffacf..b0de43a 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
@@ -6,8 +6,8 @@ import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.helix.core.OutPort;
 import org.apache.airavata.helix.impl.task.*;
-import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;
-import org.apache.airavata.helix.impl.task.submission.task.JobSubmissionTask;
+import org.apache.airavata.helix.impl.task.completing.CompletingTask;
+import org.apache.airavata.helix.impl.task.staging.OutputDataStagingTask;
 import org.apache.airavata.helix.workflow.WorkflowManager;
 import org.apache.airavata.job.monitor.kafka.JobStatusResultDeserializer;
 import org.apache.airavata.job.monitor.parser.JobStatusResult;
@@ -17,7 +17,6 @@ import org.apache.airavata.messaging.core.Publisher;
 import org.apache.airavata.messaging.core.Type;
 import org.apache.airavata.messaging.core.impl.RabbitMQPublisher;
 import org.apache.airavata.model.experiment.ExperimentModel;
-import org.apache.airavata.model.job.JobModel;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
 import org.apache.airavata.model.messaging.event.MessageType;
@@ -36,7 +35,6 @@ import org.apache.kafka.clients.consumer.Consumer;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.serialization.LongDeserializer;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
index 3030375..ac29c9d 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
@@ -5,9 +5,9 @@ import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.ThriftUtils;
 import org.apache.airavata.helix.core.OutPort;
 import org.apache.airavata.helix.impl.task.AiravataTask;
-import org.apache.airavata.helix.impl.task.EnvSetupTask;
-import org.apache.airavata.helix.impl.task.InputDataStagingTask;
-import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;
+import org.apache.airavata.helix.impl.task.env.EnvSetupTask;
+import org.apache.airavata.helix.impl.task.staging.InputDataStagingTask;
+import org.apache.airavata.helix.impl.task.submission.DefaultJobSubmissionTask;
 import org.apache.airavata.helix.workflow.WorkflowManager;
 import org.apache.airavata.messaging.core.*;
 import org.apache.airavata.model.experiment.ExperimentModel;
@@ -19,7 +19,6 @@ import org.apache.airavata.model.task.TaskTypes;
 import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.ExperimentCatalog;
 import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
-import org.apache.airavata.registry.cpi.RegistryException;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.thrift.TBase;
diff --git a/modules/helix-spectator/src/main/resources/log4j.properties b/modules/helix-spectator/src/main/resources/log4j.properties
index dba82a1..04aa72a 100644
--- a/modules/helix-spectator/src/main/resources/log4j.properties
+++ b/modules/helix-spectator/src/main/resources/log4j.properties
@@ -3,8 +3,8 @@ log4j.rootLogger=INFO, A1
 
 log4j.category.org.apache.helix=WARN
 log4j.category.org.apache.zookeeper=WARN
-log4j.category.org.apache.airavata.helix.impl.task.submission.GroovyMapData=TRACE
-log4j.category.org.apache.airavata.helix.impl.task.submission.task.JobSubmissionTask=DEBUG
+log4j.category.org.apache.airavata.helix.impl.task.submission.config.GroovyMapData=TRACE
+log4j.category.org.apache.airavata.helix.impl.task.submission.JobSubmissionTask=DEBUG
 # A1 is set to be a ConsoleAppender.
 log4j.appender.A1=org.apache.log4j.ConsoleAppender
 


[airavata] 05/17: Implementing DataStaging tasks

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 7350b253564a91e4a6ebcd8df3673c69f3d1ba92
Author: dimuthu <di...@gmail.com>
AuthorDate: Thu Mar 1 12:27:31 2018 -0500

    Implementing DataStaging tasks
---
 .../apache/airavata/agents/api/AgentAdaptor.java   |   6 +-
 .../agents/api/StorageResourceAdaptor.java         |   7 +
 .../helix/agent/local/LocalAgentAdaptor.java       |  25 ++-
 .../airavata/helix/agent/ssh/SshAgentAdaptor.java  | 128 ++++++++++++++-
 .../agent/storage/StorageResourceAdaptorImpl.java  |  85 ++++++++++
 .../apache/airavata/helix/task/api/TaskHelper.java |   1 +
 .../helix/task/api/support/AdaptorSupport.java     |   5 +-
 .../helix/core/support/AdaptorSupportImpl.java     |   8 +
 .../helix/core/support/TaskHelperImpl.java         |   1 +
 .../helix/impl/participant/GlobalParticipant.java  |   3 +-
 .../airavata/helix/impl/task/DataStagingTask.java  |  90 +++++++++--
 .../helix/impl/task/InputDataStagingTask.java      | 117 ++++++++++++++
 .../helix/impl/task/OutputDataStagingTask.java     | 171 +++++++++++++++++++++
 .../airavata/helix/impl/task/TaskContext.java      |   6 +-
 .../helix/impl/task/TaskOnFailException.java       |  28 ++++
 .../impl/task/submission/GroovyMapBuilder.java     |   3 +-
 .../task/submission/task/JobSubmissionTask.java    |   2 +-
 .../helix/impl/workflow/SimpleWorkflow.java        |  17 +-
 18 files changed, 673 insertions(+), 30 deletions(-)

diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentAdaptor.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentAdaptor.java
index 2d295de..2948dc1 100644
--- a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentAdaptor.java
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentAdaptor.java
@@ -17,7 +17,11 @@ public interface AgentAdaptor {
 
     public void createDirectory(String path) throws AgentException;
 
-    public void copyFile(String sourceFile, String destinationFile) throws AgentException;
+    public void copyFileTo(String localFile, String remoteFile) throws AgentException;
+
+    public void copyFileFrom(String remoteFile, String localFile) throws AgentException;
 
     public List<String> listDirectory(String path) throws AgentException;
+
+    public List<String> getFileNameFromExtension(String fileName, String parentPath) throws AgentException;
 }
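
The old copyFile(source, destination) is split into two direction-explicit operations, copyFileTo (local to remote) and copyFileFrom (remote to local), plus a getFileNameFromExtension lookup that the wildcard output handling later relies on. A minimal caller sketch, assuming the adaptor has already been obtained and initialized through the usual adaptor support; the paths below are placeholders:

    // Hedged sketch: paths are placeholders, and the adaptor is assumed to come
    // from AdaptorSupport.fetchAdaptor(...) and to be initialized already.
    import org.apache.airavata.agents.api.AgentAdaptor;
    import org.apache.airavata.agents.api.AgentException;

    public class CopyDirectionSketch {
        public static void roundTrip(AgentAdaptor adaptor) throws AgentException {
            // local -> remote: push a staged input next to the job's working directory
            adaptor.copyFileTo("/tmp/staging/input.dat", "/scratch/job/input.dat");
            // remote -> local: pull a generated output back for later upload to storage
            adaptor.copyFileFrom("/scratch/job/output.dat", "/tmp/staging/output.dat");
        }
    }
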
diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/StorageResourceAdaptor.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/StorageResourceAdaptor.java
new file mode 100644
index 0000000..9c5d471
--- /dev/null
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/StorageResourceAdaptor.java
@@ -0,0 +1,7 @@
+package org.apache.airavata.agents.api;
+
+public interface StorageResourceAdaptor {
+    public void init(String storageResourceId, String gatewayId, String loginUser, String token) throws AgentException;
+    public void uploadFile(String sourceFile, String destFile) throws AgentException;
+    public void downloadFile(String sourceFile, String destFile) throws AgentException;
+}
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/local/LocalAgentAdaptor.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/local/LocalAgentAdaptor.java
index af507bf..7a56526 100644
--- a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/local/LocalAgentAdaptor.java
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/local/LocalAgentAdaptor.java
@@ -3,9 +3,7 @@ package org.apache.airavata.helix.agent.local;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.AgentException;
 import org.apache.airavata.agents.api.CommandOutput;
-import org.apache.airavata.agents.api.JobSubmissionOutput;
 
-import java.io.File;
 import java.util.List;
 
 public class LocalAgentAdaptor implements AgentAdaptor {
@@ -13,31 +11,42 @@ public class LocalAgentAdaptor implements AgentAdaptor {
 
 
     public void init(Object agentPams) throws AgentException {
-
+        throw new AgentException("Operation not implemented");
     }
 
     @Override
     public void init(String computeResource, String gatewayId, String userId, String token) throws AgentException {
-
+        throw new AgentException("Operation not implemented");
     }
 
     @Override
     public CommandOutput executeCommand(String command, String workingDirectory) throws AgentException {
-        return null;
+        throw new AgentException("Operation not implemented");
     }
 
     @Override
     public void createDirectory(String path) throws AgentException {
-
+        throw new AgentException("Operation not implemented");
     }
 
     @Override
-    public void copyFile(String sourceFile, String destinationFile) throws AgentException {
+    public void copyFileTo(String localFile, String remoteFile) throws AgentException {
+        throw new AgentException("Operation not implemented");
+    }
 
+    @Override
+    public void copyFileFrom(String remoteFile, String localFile) throws AgentException {
+        throw new AgentException("Operation not implemented");
     }
 
+
     @Override
     public List<String> listDirectory(String path) throws AgentException {
-        return null;
+        throw new AgentException("Operation not implemented");
+    }
+
+    @Override
+    public List<String> getFileNameFromExtension(String fileName, String parentPath) throws AgentException {
+        throw new AgentException("Operation not implemented");
     }
 }
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
index ef8d580..2ad2415 100644
--- a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
@@ -190,7 +190,7 @@ public class SshAgentAdaptor implements AgentAdaptor {
         }
     }
 
-    public void copyFile(String localFile, String remoteFile) throws AgentException {
+    public void copyFileTo(String localFile, String remoteFile) throws AgentException {
         FileInputStream fis = null;
         String prefix = null;
         if (new File(localFile).isDirectory()) {
@@ -296,6 +296,127 @@ public class SshAgentAdaptor implements AgentAdaptor {
         }
     }
 
+    // TODO: copying a missing remote file does not currently raise an exception
+    public void copyFileFrom(String remoteFile, String localFile) throws AgentException {
+        FileOutputStream fos = null;
+        ChannelExec channelExec = null;
+        try {
+            String prefix = null;
+            if (new File(localFile).isDirectory()) {
+                prefix = localFile + File.separator;
+            }
+
+            StandardOutReader stdOutReader = new StandardOutReader();
+
+            // exec 'scp -f remotefile' remotely
+            String command = "scp -f " + remoteFile;
+            channelExec = (ChannelExec)session.openChannel("exec");
+            channelExec.setCommand(command);
+
+            //channelExec.setErrStream(stdOutReader.getStandardError());
+            // get I/O streams for remote scp
+            OutputStream out = channelExec.getOutputStream();
+            InputStream in = channelExec.getInputStream();
+            InputStream err = channelExec.getErrStream();
+
+            if (!channelExec.isClosed()){
+                channelExec.connect();
+            }
+
+            byte[] buf = new byte[1024];
+
+            // send '\0'
+            buf[0] = 0;
+            out.write(buf, 0, 1);
+            out.flush();
+
+            while (true) {
+                int c = checkAck(in);
+                if (c != 'C') {
+                    break;
+                }
+
+                // read '0644 '
+                in.read(buf, 0, 5);
+
+                long filesize = 0L;
+                while (true) {
+                    if (in.read(buf, 0, 1) < 0) {
+                        // error
+                        break;
+                    }
+                    if (buf[0] == ' ') break;
+                    filesize = filesize * 10L + (long) (buf[0] - '0');
+                }
+
+                String file = null;
+                for (int i = 0; ; i++) {
+                    in.read(buf, i, 1);
+                    if (buf[i] == (byte) 0x0a) {
+                        file = new String(buf, 0, i);
+                        break;
+                    }
+                }
+
+                //System.out.println("filesize="+filesize+", file="+file);
+
+                // send '\0'
+                buf[0] = 0;
+                out.write(buf, 0, 1);
+                out.flush();
+
+                // read a content of lfile
+                fos = new FileOutputStream(prefix == null ? localFile : prefix + file);
+                int foo;
+                while (true) {
+                    if (buf.length < filesize) foo = buf.length;
+                    else foo = (int) filesize;
+                    foo = in.read(buf, 0, foo);
+                    if (foo < 0) {
+                        // error
+                        break;
+                    }
+                    fos.write(buf, 0, foo);
+                    filesize -= foo;
+                    if (filesize == 0L) break;
+                }
+                fos.close();
+                fos = null;
+
+                if (checkAck(in) != 0) {
+                    String error = "Error transferring the file content";
+                    //log.error(error);
+                    throw new AgentException(error);
+                }
+
+                // send '\0'
+                buf[0] = 0;
+                out.write(buf, 0, 1);
+                out.flush();
+            }
+
+
+            stdOutReader.readStdErrFromStream(err);
+            if (stdOutReader.getStdError().contains("scp:")) {
+                throw new AgentException(stdOutReader.getStdError());
+            }
+
+        } catch (Exception e) {
+            //log.error(e.getMessage(), e);
+            throw new AgentException(e);
+        } finally {
+            try {
+                if (fos != null) fos.close();
+            } catch (Exception ee) {
+            }
+
+            if (channelExec != null) {
+                channelExec.disconnect();
+            }
+
+        }
+    }
+
     @Override
     public List<String> listDirectory(String path) throws AgentException {
         String command = "ls " + path;
@@ -333,6 +454,11 @@ public class SshAgentAdaptor implements AgentAdaptor {
         }
     }
 
+    @Override
+    public List<String> getFileNameFromExtension(String fileName, String parentPath) throws AgentException {
+        throw new AgentException("Operation not implemented");
+    }
+
     private static class DefaultUserInfo implements UserInfo, UIKeyboardInteractive {
 
         private String userName;
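
copyFileFrom runs "scp -f <remoteFile>" on the remote side and then acts as an scp sink: each step is separated by a single acknowledgement byte, and a 'C' byte announces the next file header before the raw content follows. A minimal sketch of that acknowledgement convention, which the loop above reads through checkAck(in); this is an illustration only, not the checkAck implementation in SshAgentAdaptor:

    // Sketch of the classic single-byte scp acknowledgement assumed by the
    // copyFileFrom loop: 0 = ok, 'C' = file header follows, 1 = warning,
    // 2 = fatal error (both followed by a '\n'-terminated message), -1 = EOF.
    // Not the actual checkAck(...) in SshAgentAdaptor.
    import java.io.IOException;
    import java.io.InputStream;

    final class ScpAckSketch {
        static int readAck(InputStream in) throws IOException {
            int b = in.read();
            if (b == 1 || b == 2) {
                StringBuilder message = new StringBuilder();
                int c;
                while ((c = in.read()) != -1 && c != '\n') {
                    message.append((char) c);
                }
                throw new IOException("scp error (" + b + "): " + message);
            }
            return b;
        }
    }
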
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/storage/StorageResourceAdaptorImpl.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/storage/StorageResourceAdaptorImpl.java
new file mode 100644
index 0000000..537f17d
--- /dev/null
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/storage/StorageResourceAdaptorImpl.java
@@ -0,0 +1,85 @@
+package org.apache.airavata.helix.agent.storage;
+
+import com.jcraft.jsch.Session;
+import org.apache.airavata.agents.api.AgentException;
+import org.apache.airavata.agents.api.StorageResourceAdaptor;
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.DBUtil;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.credential.store.credential.Credential;
+import org.apache.airavata.credential.store.credential.impl.ssh.SSHCredential;
+import org.apache.airavata.credential.store.store.CredentialStoreException;
+import org.apache.airavata.credential.store.store.impl.CredentialReaderImpl;
+import org.apache.airavata.helix.agent.ssh.SshAdaptorParams;
+import org.apache.airavata.helix.agent.ssh.SshAgentAdaptor;
+import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
+import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
+import org.apache.airavata.registry.cpi.AppCatalog;
+import org.apache.airavata.registry.cpi.AppCatalogException;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+public class StorageResourceAdaptorImpl extends SshAgentAdaptor implements StorageResourceAdaptor  {
+
+    private static final Logger logger = LogManager.getLogger(SshAgentAdaptor.class);
+
+    private Session session = null;
+    private AppCatalog appCatalog;
+
+    @Override
+    public void init(String storageResourceId, String gatewayId, String loginUser, String token) throws AgentException {
+
+        try {
+            logger.info("Initializing Storage Resource Adaptor for storage resource : "+ storageResourceId + ", gateway : " +
+                    gatewayId +", user " + loginUser + ", token : " + token);
+            this.appCatalog = RegistryFactory.getAppCatalog();
+            StorageResourceDescription storageResource = appCatalog.getStorageResource().getStorageResource(storageResourceId);
+            String hostName = storageResource.getHostName();
+
+            String jdbcUrl = ServerSettings.getCredentialStoreDBURL();
+            String jdbcUsr = ServerSettings.getCredentialStoreDBUser();
+            String jdbcPass = ServerSettings.getCredentialStoreDBPassword();
+            String driver = ServerSettings.getCredentialStoreDBDriver();
+            CredentialReaderImpl credentialReader = new CredentialReaderImpl(new DBUtil(jdbcUrl, jdbcUsr, jdbcPass, driver));
+
+            logger.info("Fetching credentials for cred store token " + token);
+
+            Credential credential = credentialReader.getCredential(gatewayId, token);
+
+            if (credential instanceof SSHCredential) {
+                SSHCredential sshCredential = SSHCredential.class.cast(credential);
+                SshAdaptorParams adaptorParams = new SshAdaptorParams();
+                adaptorParams.setHostName(storageResource.getHostName());
+                adaptorParams.setUserName(loginUser);
+                adaptorParams.setPassphrase(sshCredential.getPassphrase());
+                adaptorParams.setPrivateKey(sshCredential.getPrivateKey());
+                adaptorParams.setPublicKey(sshCredential.getPublicKey());
+                adaptorParams.setStrictHostKeyChecking(false);
+                init(adaptorParams);
+            }
+
+        } catch (AppCatalogException e) {
+            e.printStackTrace();
+        } catch (InstantiationException e) {
+            e.printStackTrace();
+        } catch (IllegalAccessException e) {
+            e.printStackTrace();
+        } catch (CredentialStoreException e) {
+            e.printStackTrace();
+        } catch (ClassNotFoundException e) {
+            e.printStackTrace();
+        } catch (ApplicationSettingsException e) {
+            e.printStackTrace();
+        }
+    }
+
+    @Override
+    public void uploadFile(String sourceFile, String destFile) throws AgentException {
+        super.copyFileTo(sourceFile, destFile);
+    }
+
+    @Override
+    public void downloadFile(String sourceFile, String destFile) throws AgentException {
+        super.copyFileFrom(sourceFile, destFile);
+    }
+}
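
StorageResourceAdaptorImpl resolves the storage host from the app catalog and the SSH key pair from the credential store, so a caller only supplies the four identifiers passed to init. Note that init currently catches the catalog and credential-store exceptions and only prints their stack traces, so a failed lookup surfaces later as a transfer error rather than at init time. A minimal usage sketch; every argument value below is a placeholder:

    // Hedged sketch: all identifiers, the token and both paths are placeholders.
    import org.apache.airavata.agents.api.AgentException;
    import org.apache.airavata.agents.api.StorageResourceAdaptor;
    import org.apache.airavata.helix.agent.storage.StorageResourceAdaptorImpl;

    public class StorageAdaptorSketch {
        public static void main(String[] args) throws AgentException {
            StorageResourceAdaptor storage = new StorageResourceAdaptorImpl();
            storage.init("storage-resource-id", "gateway-id", "login-user", "cred-store-token");
            // storage resource -> local buffer
            storage.downloadFile("/gateway/data/input.dat", "/tmp/staging/input.dat");
            // local buffer -> storage resource
            storage.uploadFile("/tmp/staging/result.dat", "/gateway/data/result.dat");
        }
    }
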
diff --git a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/TaskHelper.java b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/TaskHelper.java
index 07de06e..4550a66 100644
--- a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/TaskHelper.java
+++ b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/TaskHelper.java
@@ -1,5 +1,6 @@
 package org.apache.airavata.helix.task.api;
 
+import org.apache.airavata.agents.api.StorageResourceAdaptor;
 import org.apache.airavata.helix.task.api.support.AdaptorSupport;
 
 /**
diff --git a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
index 4b6e11e..456fdee 100644
--- a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
+++ b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
@@ -1,8 +1,6 @@
 package org.apache.airavata.helix.task.api.support;
 
-import org.apache.airavata.agents.api.AgentAdaptor;
-import org.apache.airavata.agents.api.CommandOutput;
-import org.apache.airavata.agents.api.JobSubmissionOutput;
+import org.apache.airavata.agents.api.*;
 
 import java.io.File;
 
@@ -16,5 +14,6 @@ public interface AdaptorSupport {
     public void initializeAdaptor();
 
     public AgentAdaptor fetchAdaptor(String gatewayId, String computeResource, String protocol, String authToken, String userId) throws Exception;
+    public StorageResourceAdaptor fetchStorageAdaptor(String gatewayId, String storageResourceId, String protocol,  String authToken, String userId) throws AgentException;
 
 }
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
index a98b8f0..c264012 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
@@ -2,6 +2,7 @@ package org.apache.airavata.helix.core.support;
 
 import org.apache.airavata.agents.api.*;
 import org.apache.airavata.helix.agent.ssh.SshAgentAdaptor;
+import org.apache.airavata.helix.agent.storage.StorageResourceAdaptorImpl;
 import org.apache.airavata.helix.task.api.support.AdaptorSupport;
 
 import java.io.File;
@@ -35,4 +36,11 @@ public class AdaptorSupportImpl implements AdaptorSupport {
         agentAdaptor.init(computeResource, gatewayId, userId, authToken);
         return agentAdaptor;
     }
+
+    @Override
+    public StorageResourceAdaptor fetchStorageAdaptor(String gatewayId, String storageResourceId, String protocol, String authToken, String userId) throws AgentException {
+        StorageResourceAdaptor storageResourceAdaptor = new StorageResourceAdaptorImpl();
+        storageResourceAdaptor.init(storageResourceId, gatewayId, userId, authToken);
+        return storageResourceAdaptor;
+    }
 }
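
With fetchStorageAdaptor sitting next to fetchAdaptor, a staging task can obtain both ends of a transfer from the same AdaptorSupport and pipe a file through a local buffer, which is exactly the shape of the InputDataStagingTask introduced below. A hedged end-to-end sketch; the identifiers, token, protocol string and paths are placeholders:

    // Sketch of an input-staging style transfer via AdaptorSupport; all argument
    // values are placeholders, and "SSH" stands in for the resolved protocols.
    import org.apache.airavata.agents.api.AgentAdaptor;
    import org.apache.airavata.agents.api.StorageResourceAdaptor;
    import org.apache.airavata.helix.task.api.support.AdaptorSupport;

    public class StageInSketch {
        public static void stageIn(AdaptorSupport support) throws Exception {
            StorageResourceAdaptor storage = support.fetchStorageAdaptor(
                    "gateway-id", "storage-resource-id", "SSH", "cred-store-token", "login-user");
            AgentAdaptor compute = support.fetchAdaptor(
                    "gateway-id", "compute-resource-id", "SSH", "cred-store-token", "login-user");

            String localBuffer = "/tmp/staging/input.dat";                // temporary local copy
            storage.downloadFile("/gateway/data/input.dat", localBuffer); // storage -> local
            compute.copyFileTo(localBuffer, "/scratch/job/input.dat");    // local -> compute
        }
    }
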
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/TaskHelperImpl.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/TaskHelperImpl.java
index 77fc5ce..2987ebd 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/TaskHelperImpl.java
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/TaskHelperImpl.java
@@ -1,5 +1,6 @@
 package org.apache.airavata.helix.core.support;
 
+import org.apache.airavata.agents.api.StorageResourceAdaptor;
 import org.apache.airavata.helix.task.api.TaskHelper;
 
 /**
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
index f0e166b..984b277 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
@@ -16,7 +16,8 @@ public class GlobalParticipant extends HelixParticipant {
 
     private String[] taskClasses = {
         "org.apache.airavata.helix.impl.task.EnvSetupTask",
-        "org.apache.airavata.helix.impl.task.DataStagingTask",
+        "org.apache.airavata.helix.impl.task.InputDataStagingTask",
+        "org.apache.airavata.helix.impl.task.OutputDataStagingTask",
         "org.apache.airavata.helix.impl.task.submission.task.ForkJobSubmissionTask",
         "org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask",
         "org.apache.airavata.helix.impl.task.submission.task.LocalJobSubmissionTask"
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java
index 346aa73..594cbc9 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java
@@ -1,19 +1,89 @@
 package org.apache.airavata.helix.impl.task;
 
-import org.apache.airavata.helix.task.api.TaskHelper;
-import org.apache.airavata.helix.task.api.annotation.TaskDef;
-import org.apache.helix.task.TaskResult;
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.AgentException;
+import org.apache.airavata.agents.api.StorageResourceAdaptor;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.helix.task.api.support.AdaptorSupport;
+import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
+import org.apache.airavata.model.task.DataStagingTaskModel;
+import org.apache.airavata.registry.cpi.AppCatalogException;
+import org.apache.commons.io.FileUtils;
 
-@TaskDef(name = "Data Staging Task")
-public class DataStagingTask extends AiravataTask {
+import java.io.File;
+import java.io.IOException;
 
-    @Override
-    public TaskResult onRun(TaskHelper taskHelper) {
-        return null;
+public abstract class DataStagingTask extends AiravataTask {
+
+    protected DataStagingTaskModel getDataStagingTaskModel() throws TaskOnFailException {
+        try {
+            Object subTaskModel = getTaskContext().getSubTaskModel();
+            if (subTaskModel != null) {
+                return DataStagingTaskModel.class.cast(subTaskModel);
+            } else {
+                throw new TaskOnFailException("Data staging task model can not be null for task " + getTaskId(), true, null);
+            }
+        } catch (Exception e) {
+            throw new TaskOnFailException("Failed while obtaining data staging task model for task " + getTaskId(), true, e);
+        }
+    }
+
+    protected StorageResourceDescription getStorageResource() throws TaskOnFailException {
+        try {
+            StorageResourceDescription storageResource = getTaskContext().getStorageResource();
+            if (storageResource == null) {
+                throw new TaskOnFailException("Storage resource can not be null for task " + getTaskId(), true, null);
+            }
+            return storageResource;
+        } catch (AppCatalogException e) {
+            throw new TaskOnFailException("Failed to fetch the storage resource for task " + getTaskId(), true, e);
+        }
     }
 
-    @Override
-    public void onCancel() {
+    protected StorageResourceAdaptor getStorageAdaptor(AdaptorSupport adaptorSupport) throws TaskOnFailException {
+        try {
+            StorageResourceAdaptor storageResourceAdaptor = adaptorSupport.fetchStorageAdaptor(
+                    getGatewayId(),
+                    getTaskContext().getStorageResourceId(),
+                    "SSH",
+                    getTaskContext().getStorageResourceCredentialToken(),
+                    getTaskContext().getStorageResourceLoginUserName());
+
+            if (storageResourceAdaptor == null) {
+                throw new TaskOnFailException("Storage resource adaptor for " + getTaskContext().getStorageResourceId() + " can not be null", true, null);
+            }
+            return storageResourceAdaptor;
+        } catch (AgentException e) {
+            throw new TaskOnFailException("Failed to obtain adaptor for storage resource " + getTaskContext().getStorageResourceId() +
+                    " in task " + getTaskId(), true, e);
+        }
+    }
+
+    protected AgentAdaptor getComputeResourceAdaptor(AdaptorSupport adaptorSupport) throws TaskOnFailException {
+        try {
+            return adaptorSupport.fetchAdaptor(
+                    getTaskContext().getGatewayId(),
+                    getTaskContext().getComputeResourceId(),
+                    getTaskContext().getJobSubmissionProtocol().name(),
+                    getTaskContext().getComputeResourceCredentialToken(),
+                    getTaskContext().getComputeResourceLoginUserName());
+        } catch (Exception e) {
+            throw new TaskOnFailException("Failed to obtain adaptor for compute resource " + getTaskContext().getComputeResourceId() +
+                    " in task " + getTaskId(), true, e);
+        }
+    }
 
+    protected String getLocalDataPath(String fileName) throws TaskOnFailException {
+        String localDataPath = ServerSettings.getLocalDataLocation();
+        localDataPath = (localDataPath.endsWith(File.separator) ? localDataPath : localDataPath + File.separator);
+        localDataPath = (localDataPath.endsWith(File.separator) ? localDataPath : localDataPath + File.separator) +
+                getProcessId() + File.separator + "temp_inputs" + File.separator;
+        try {
+            FileUtils.forceMkdir(new File(localDataPath));
+        } catch (IOException e) {
+            throw new TaskOnFailException("Failed to build directories " + localDataPath, true, e);
+        }
+        localDataPath = localDataPath + fileName;
+        return localDataPath;
     }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
new file mode 100644
index 0000000..30eeec0
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
@@ -0,0 +1,117 @@
+package org.apache.airavata.helix.impl.task;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.AgentException;
+import org.apache.airavata.agents.api.StorageResourceAdaptor;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
+import org.apache.airavata.model.application.io.InputDataObjectType;
+import org.apache.airavata.model.task.DataStagingTaskModel;
+import org.apache.airavata.registry.cpi.AppCatalogException;
+import org.apache.commons.io.FileUtils;
+import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+@TaskDef(name = "Input Data Staging Task")
+public class InputDataStagingTask extends DataStagingTask {
+
+    private static final Logger logger = LogManager.getLogger(InputDataStagingTask.class);
+
+    @Override
+    public TaskResult onRun(TaskHelper taskHelper) {
+        logger.info("Starting Input Data Staging Task " + getTaskId());
+
+        try {
+            // Get and validate data staging task model
+            DataStagingTaskModel dataStagingTaskModel = getDataStagingTaskModel();
+
+            // Fetch and validate input data type from data staging task model
+            InputDataObjectType processInput = dataStagingTaskModel.getProcessInput();
+            if (processInput != null && processInput.getValue() == null) {
+                String message = "expId: " + getExperimentId() + ", processId: " + getProcessId() + ", taskId: " + getTaskId() +
+                        ":- Couldn't stage file " + processInput.getName() + " , file name shouldn't be null. ";
+                logger.error(message);
+                if (processInput.isIsRequired()) {
+                    message += "File name is null, but this input's isRequired bit is not set";
+                } else {
+                    message += "File name is null";
+                }
+                logger.error(message);
+                throw new TaskOnFailException(message, true, null);
+            }
+
+            // Fetch and validate storage resource
+            StorageResourceDescription storageResource = getStorageResource();
+
+            // Fetch and validate source and destination URLS
+            URI sourceURI;
+            URI destinationURI;
+            String sourceFileName;
+            try {
+                sourceURI = new URI(dataStagingTaskModel.getSource());
+                destinationURI = new URI(dataStagingTaskModel.getDestination());
+
+                if (logger.isDebugEnabled()) {
+                    logger.debug("Source file " + sourceURI.getPath() + ", destination uri " + destinationURI.getPath() + " for task " + getTaskId());
+                }
+
+                sourceFileName = sourceURI.getPath().substring(sourceURI.getPath().lastIndexOf(File.separator) + 1,
+                        sourceURI.getPath().length());
+            } catch (URISyntaxException e) {
+                throw new TaskOnFailException("Failed to obtain source URI for input data staging task " + getTaskId(), true, e);
+            }
+
+            // Fetch and validate storage adaptor
+            StorageResourceAdaptor storageResourceAdaptor = getStorageAdaptor(taskHelper.getAdaptorSupport());
+
+            // Fetch and validate compute resource adaptor
+            AgentAdaptor adaptor = getComputeResourceAdaptor(taskHelper.getAdaptorSupport());
+
+            String localSourceFilePath = getLocalDataPath(sourceFileName);
+            // Downloading input file from the storage resource
+            try {
+                logger.info("Downloading input file " + sourceURI.getPath() + " to the local path " + localSourceFilePath);
+                storageResourceAdaptor.downloadFile(sourceURI.getPath(), localSourceFilePath);
+                logger.info("Input file downloaded to " + localSourceFilePath);
+            } catch (AgentException e) {
+                throw new TaskOnFailException("Failed downloading input file " + sourceFileName + " to the local path " + localSourceFilePath, true, e);
+            }
+
+            // Uploading input file to the compute resource
+            try {
+                logger.info("Uploading the input file to " + destinationURI.getPath() + " from local path " + localSourceFilePath);
+                adaptor.copyFileTo(localSourceFilePath, destinationURI.getPath());
+                logger.info("Input file uploaded to " + destinationURI.getPath());
+            } catch (AgentException e) {
+                throw new TaskOnFailException("Failed uploading the input file to " + destinationURI.getPath() + " from local path " + localSourceFilePath, true, e);
+            }
+
+            return onSuccess("Input data staging task " + getTaskId() + " successfully completed");
+
+        } catch (TaskOnFailException e) {
+            if (e.getError() != null) {
+                logger.error(e.getReason(), e.getError());
+            } else {
+                logger.error(e.getReason());
+            }
+            return onFail(e.getReason(), e.isCritical(), e.getError());
+
+        }catch (Exception e) {
+            logger.error("Unknown error while executing input data staging task " + getTaskId(), e);
+            return onFail("Unknown error while executing input data staging task " + getTaskId(), false,  e);
+        }
+    }
+
+    @Override
+    public void onCancel() {
+
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
new file mode 100644
index 0000000..d2280d0
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
@@ -0,0 +1,171 @@
+package org.apache.airavata.helix.impl.task;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.AgentException;
+import org.apache.airavata.agents.api.StorageResourceAdaptor;
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
+import org.apache.airavata.model.application.io.OutputDataObjectType;
+import org.apache.airavata.model.task.DataStagingTaskModel;
+import org.apache.airavata.registry.cpi.ExpCatChildDataType;
+import org.apache.airavata.registry.cpi.RegistryException;
+import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.List;
+
+@TaskDef(name = "Output Data Staging Task")
+public class OutputDataStagingTask extends DataStagingTask {
+
+    private static final Logger logger = LogManager.getLogger(OutputDataStagingTask.class);
+
+    @Override
+    public TaskResult onRun(TaskHelper taskHelper) {
+
+        try {
+            // Get and validate data staging task model
+            DataStagingTaskModel dataStagingTaskModel = getDataStagingTaskModel();
+
+            // Fetch and validate input data type from data staging task model
+            OutputDataObjectType processOutput = dataStagingTaskModel.getProcessOutput();
+            if (processOutput != null && processOutput.getValue() == null) {
+                String message = "expId: " + getExperimentId() + ", processId: " + getProcessId() + ", taskId: " + getTaskId() +
+                        ":- Couldn't stage file " + processOutput.getName() + " , file name shouldn't be null. ";
+                logger.error(message);
+                if (processOutput.isIsRequired()) {
+                    message += "File name is null, but this output's isRequired bit is not set";
+                } else {
+                    message += "File name is null";
+                }
+                throw new TaskOnFailException(message, true, null);
+            }
+
+            // Fetch and validate storage resource
+            StorageResourceDescription storageResource = getStorageResource();
+
+            // Fetch and validate source and destination URLS
+            URI sourceURI;
+            URI destinationURI;
+            String sourceFileName;
+            try {
+                sourceURI = new URI(dataStagingTaskModel.getSource());
+                destinationURI = new URI(dataStagingTaskModel.getDestination());
+
+                if (logger.isDebugEnabled()) {
+                    logger.debug("Source file " + sourceURI.getPath() + ", destination uri " + destinationURI.getPath() + " for task " + getTaskId());
+                }
+
+                sourceFileName = sourceURI.getPath().substring(sourceURI.getPath().lastIndexOf(File.separator) + 1,
+                        sourceURI.getPath().length());
+            } catch (URISyntaxException e) {
+                throw new TaskOnFailException("Failed to obtain source URI for output data staging task " + getTaskId(), true, e);
+            }
+
+            // Fetch and validate storage adaptor
+            StorageResourceAdaptor storageResourceAdaptor = getStorageAdaptor(taskHelper.getAdaptorSupport());
+
+            // Fetch and validate compute resource adaptor
+            AgentAdaptor adaptor = getComputeResourceAdaptor(taskHelper.getAdaptorSupport());
+
+            if (sourceFileName.contains("*")) {
+                // if file is declared as a wild card
+                logger.info("Handling output files with " + sourceFileName + " extension for task " + getTaskId());
+
+                String destParentPath = (new File(destinationURI.getPath())).getParentFile().getPath();
+                String sourceParentPath = (new File(sourceURI.getPath())).getParentFile().getPath();
+
+                logger.debug("Destination parent path " + destParentPath + ", source parent path " + sourceParentPath);
+                List<String> fileNames = null;
+                try {
+                    fileNames = adaptor.getFileNameFromExtension(sourceFileName, sourceParentPath);
+
+                    if (logger.isTraceEnabled()) {
+                        fileNames.forEach(fileName -> logger.trace("File found : " + fileName));
+                    }
+
+                } catch (AgentException e) {
+                    throw new TaskOnFailException("Failed to fetch the file list from extension " + sourceFileName, true, e);
+                }
+
+                for (String temp : fileNames) {
+                    if (temp != null && !temp.equals("")) {
+                        sourceFileName = temp;
+                    }
+                    if (destParentPath.endsWith(File.separator)) {
+                        destinationURI = new URI(destParentPath + sourceFileName);
+                    } else {
+                        destinationURI = new URI(destParentPath + File.separator + sourceFileName);
+                    }
+
+                    //Wildcard support is only enabled for output data staging
+                    processOutput.setName(sourceFileName);
+
+                    try {
+                        getTaskContext().getExperimentCatalog().add(ExpCatChildDataType.EXPERIMENT_OUTPUT, Arrays.asList(processOutput), getExperimentId());
+                        getTaskContext().getExperimentCatalog().add(ExpCatChildDataType.PROCESS_OUTPUT, Arrays.asList(processOutput), getProcessId());
+                    } catch (RegistryException e) {
+                        throw new TaskOnFailException("Failed to update experiment or process outputs for task " + getTaskId(), true, e);
+                    }
+
+                    logger.info("Transferring file " + sourceFileName);
+                    transferFile(sourceURI, destinationURI, sourceFileName, adaptor, storageResourceAdaptor);
+                }
+
+            } else {
+                // Transfer the output file from the compute resource to the storage resource
+                transferFile(sourceURI, destinationURI, sourceFileName, adaptor, storageResourceAdaptor);
+                return onSuccess("Output data staging task " + getTaskId() + " successfully completed");
+            }
+
+        } catch (TaskOnFailException e) {
+            if (e.getError() != null) {
+                logger.error(e.getReason(), e.getError());
+            } else {
+                logger.error(e.getReason());
+            }
+            return onFail(e.getReason(), e.isCritical(), e.getError());
+
+        } catch (Exception e) {
+            logger.error("Unknown error while executing output data staging task " + getTaskId(), e);
+            return onFail("Unknown error while executing output data staging task " + getTaskId(), false,  e);
+        }
+
+        return onSuccess("Output data staging task " + getTaskId() + " successfully completed");
+    }
+
+    private void transferFile(URI sourceURI, URI destinationURI, String fileName, AgentAdaptor adaptor,
+                              StorageResourceAdaptor storageResourceAdaptor) throws TaskOnFailException {
+        String localSourceFilePath = getLocalDataPath(fileName);
+
+        try {
+            logger.info("Downloading output file " + sourceURI.getPath() + " to the local path " + localSourceFilePath);
+            adaptor.copyFileFrom(sourceURI.getPath(), localSourceFilePath);
+            logger.info("Output file downloaded to " + localSourceFilePath);
+        } catch (AgentException e) {
+            throw new TaskOnFailException("Failed downloading output file " + sourceURI.getPath() + " to the local path " +
+                    localSourceFilePath, true, e);
+        }
+
+        // Uploading the output file to the storage resource
+        try {
+            logger.info("Uploading the output file to " + destinationURI.getPath() + " from local path " + localSourceFilePath);
+            storageResourceAdaptor.uploadFile(localSourceFilePath, destinationURI.getPath());
+            logger.info("Output file uploaded to " + destinationURI.getPath());
+        } catch (AgentException e) {
+            throw new TaskOnFailException("Failed uploading the output file to " + destinationURI.getPath() + " from local path " +
+                    localSourceFilePath, true, e);
+        }
+    }
+
+    @Override
+    public void onCancel() {
+
+    }
+}
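
The wildcard branch above relies on AgentAdaptor.getFileNameFromExtension, which the SSH adaptor still leaves as "Operation not implemented", so wildcard outputs cannot be staged until that gap is closed. One possible shape for the expansion, built only on the already available listDirectory call, is sketched below; this helper is hypothetical and not part of this commit:

    // Hypothetical wildcard expansion helper (not part of this commit). Filters
    // listDirectory(parentPath) against a simple '*' pattern such as "out_*.dat",
    // assuming listDirectory returns bare file names.
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.airavata.agents.api.AgentAdaptor;
    import org.apache.airavata.agents.api.AgentException;

    public class WildcardSketch {
        public static List<String> expand(AgentAdaptor adaptor, String pattern, String parentPath)
                throws AgentException {
            String regex = pattern.replace(".", "\\.").replace("*", ".*");
            List<String> matches = new ArrayList<>();
            for (String name : adaptor.listDirectory(parentPath)) {
                if (name != null && name.matches(regex)) {
                    matches.add(name);
                }
            }
            return matches;
        }
    }
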
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
index f33d8a1..64a7de8 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
@@ -511,7 +511,11 @@ public class TaskContext {
         this.currentExecutingTaskModel = currentExecutingTaskModel;
     }
 
-    public StorageResourceDescription getStorageResource() {
+    public StorageResourceDescription getStorageResource() throws AppCatalogException {
+        if (storageResource == null) {
+            this.storageResource = appCatalog.getStorageResource()
+                    .getStorageResource(processModel.getStorageResourceId());
+        }
         return storageResource;
     }
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
new file mode 100644
index 0000000..196a219
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
@@ -0,0 +1,28 @@
+package org.apache.airavata.helix.impl.task;
+
+public class TaskOnFailException extends Exception {
+
+
+    private String reason;
+    private boolean critical;
+    private Throwable e;
+
+    public TaskOnFailException(String reason, boolean critical, Throwable e) {
+        super(reason, e);
+        this.reason = reason;
+        this.critical = critical;
+        this.e = e;
+    }
+
+    public String getReason() {
+        return reason;
+    }
+
+    public boolean isCritical() {
+        return critical;
+    }
+
+    public Throwable getError() {
+        return e;
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
index 16e8114..e4267ce 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
@@ -67,8 +67,7 @@ public class GroovyMapBuilder {
         inputValues.addAll(getProcessOutputValues(taskContext.getProcessModel().getProcessOutputs(), false));
         mapData.setInputsAll(inputValuesAll);
 
-        //mapData.setUserName(taskContext.geJo)
-
+        mapData.setUserName(taskContext.getComputeResourceLoginUserName());
         mapData.setShellName("/bin/bash");
 
         if (taskContext != null) {
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
index 1a024a7..b517af1 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
@@ -60,7 +60,7 @@ public abstract class JobSubmissionTask extends AiravataTask {
 
         logger.info("Copying file from " + tempJobFile.getAbsolutePath() + " to remote path " + workingDirectory +
                 " of compute resource " + getTaskContext().getComputeResourceId());
-        agentAdaptor.copyFile(tempJobFile.getAbsolutePath(), workingDirectory);
+        agentAdaptor.copyFileTo(tempJobFile.getAbsolutePath(), workingDirectory);
         // TODO transfer file
         RawCommandInfo submitCommand = jobManagerConfiguration.getSubmitCommand(workingDirectory, tempJobFile.getPath());
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
index 99db2c4..63921db 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
@@ -4,6 +4,8 @@ import org.apache.airavata.helix.core.AbstractTask;
 import org.apache.airavata.helix.core.OutPort;
 import org.apache.airavata.helix.impl.task.AiravataTask;
 import org.apache.airavata.helix.impl.task.EnvSetupTask;
+import org.apache.airavata.helix.impl.task.InputDataStagingTask;
+import org.apache.airavata.helix.impl.task.OutputDataStagingTask;
 import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;
 import org.apache.airavata.helix.workflow.WorkflowManager;
 import org.apache.airavata.model.experiment.ExperimentModel;
@@ -37,14 +39,25 @@ public class SimpleWorkflow {
         String[] taskIds = taskDag.split(",");
         final List<AiravataTask> allTasks = new ArrayList<>();
 
+        boolean jobSubmissionFound = false;
+
         for (String taskId : taskIds) {
             Optional<TaskModel> model = taskList.stream().filter(taskModel -> taskModel.getTaskId().equals(taskId)).findFirst();
-            model.ifPresent(taskModel -> {
+
+            if (model.isPresent()) {
+                TaskModel taskModel = model.get();
                 AiravataTask airavataTask = null;
                 if (taskModel.getTaskType() == TaskTypes.ENV_SETUP) {
                     airavataTask = new EnvSetupTask();
                 } else if (taskModel.getTaskType() == TaskTypes.JOB_SUBMISSION) {
                     airavataTask = new DefaultJobSubmissionTask();
+                    jobSubmissionFound = true;
+                } else if (taskModel.getTaskType() == TaskTypes.DATA_STAGING) {
+                    if (jobSubmissionFound) {
+                        airavataTask = new OutputDataStagingTask();
+                    } else {
+                        airavataTask = new InputDataStagingTask();
+                    }
                 }
 
                 if (airavataTask != null) {
@@ -57,7 +70,7 @@ public class SimpleWorkflow {
                     }
                     allTasks.add(airavataTask);
                 }
-            });
+            }
         }
 
 /*        DefaultJobSubmissionTask defaultJobSubmissionTask = new DefaultJobSubmissionTask();
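
The loop above classifies each DATA_STAGING task purely by its position in the DAG relative to the JOB_SUBMISSION entry: staging tasks before it become InputDataStagingTask, staging tasks after it become OutputDataStagingTask. A tiny self-contained illustration of that ordering heuristic; the task IDs are invented:

    // Illustration only: invented task IDs, with the type inferred from the ID
    // prefix instead of TaskTypes, to show the before/after-submission rule.
    public class StagingOrderSketch {
        public static void main(String[] args) {
            String[] taskIds = "stage-1,env-1,submit-1,stage-2".split(",");
            boolean jobSubmissionFound = false;
            for (String taskId : taskIds) {
                if (taskId.startsWith("submit")) {
                    jobSubmissionFound = true;
                    System.out.println(taskId + " -> DefaultJobSubmissionTask");
                } else if (taskId.startsWith("stage")) {
                    System.out.println(taskId + " -> " + (jobSubmissionFound
                            ? "OutputDataStagingTask" : "InputDataStagingTask"));
                } else {
                    System.out.println(taskId + " -> EnvSetupTask");
                }
            }
        }
    }
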


[airavata] 10/17: Improving status publishing

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 1c3a5d4eca7a3fd455adea1ce56437ec21499bd7
Author: dimuthu <di...@gmail.com>
AuthorDate: Mon Mar 5 08:46:53 2018 -0500

    Improving status publishing
---
 .../helix/impl/participant/GlobalParticipant.java  |   1 +
 .../airavata/helix/impl/task/AiravataTask.java     | 136 ++++++++++++++++++---
 .../airavata/helix/impl/task/CompletingTask.java   |  26 ++++
 .../airavata/helix/impl/task/EnvSetupTask.java     |   3 +
 .../helix/impl/task/InputDataStagingTask.java      |   7 +-
 .../helix/impl/task/OutputDataStagingTask.java     |   3 +
 .../airavata/helix/impl/task/TaskContext.java      |  16 +++
 .../submission/task/DefaultJobSubmissionTask.java  |  15 +--
 .../submission/task/ForkJobSubmissionTask.java     |   2 +-
 .../task/submission/task/JobSubmissionTask.java    |  43 +------
 .../submission/task/LocalJobSubmissionTask.java    |   4 +-
 .../helix/impl/workflow/PostWorkflowManager.java   |  88 +++++++++++--
 .../job/monitor/kafka/MessageProducer.java         |   1 +
 13 files changed, 266 insertions(+), 79 deletions(-)

diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
index 984b277..fc3fbcb 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
@@ -18,6 +18,7 @@ public class GlobalParticipant extends HelixParticipant {
         "org.apache.airavata.helix.impl.task.EnvSetupTask",
         "org.apache.airavata.helix.impl.task.InputDataStagingTask",
         "org.apache.airavata.helix.impl.task.OutputDataStagingTask",
+        "org.apache.airavata.helix.impl.task.CompletingTask",
         "org.apache.airavata.helix.impl.task.submission.task.ForkJobSubmissionTask",
         "org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask",
         "org.apache.airavata.helix.impl.task.submission.task.LocalJobSubmissionTask"
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
index e15195d..03dedf3 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
@@ -1,25 +1,21 @@
 package org.apache.airavata.helix.impl.task;
 
+import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.helix.core.AbstractTask;
 import org.apache.airavata.helix.core.OutPort;
 import org.apache.airavata.helix.task.api.annotation.TaskOutPort;
 import org.apache.airavata.helix.task.api.annotation.TaskParam;
 import org.apache.airavata.messaging.core.MessageContext;
+import org.apache.airavata.messaging.core.MessagingFactory;
 import org.apache.airavata.messaging.core.Publisher;
+import org.apache.airavata.messaging.core.Type;
+import org.apache.airavata.messaging.core.impl.RabbitMQPublisher;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
-import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
-import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
-import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
-import org.apache.airavata.model.messaging.event.MessageType;
-import org.apache.airavata.model.messaging.event.TaskIdentifier;
-import org.apache.airavata.model.messaging.event.TaskStatusChangeEvent;
+import org.apache.airavata.model.commons.ErrorModel;
+import org.apache.airavata.model.messaging.event.*;
 import org.apache.airavata.model.process.ProcessModel;
-import org.apache.airavata.model.status.TaskState;
-import org.apache.airavata.model.status.TaskStatus;
+import org.apache.airavata.model.status.*;
 import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.*;
 import org.apache.helix.HelixManager;
@@ -27,7 +23,8 @@ import org.apache.helix.task.TaskResult;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
-import java.util.*;
+import java.io.PrintWriter;
+import java.io.StringWriter;
 
 public abstract class AiravataTask extends AbstractTask {
 
@@ -61,17 +58,128 @@ public abstract class AiravataTask extends AbstractTask {
     }
 
     protected TaskResult onFail(String reason, boolean fatal, Throwable error) {
+
         String errorMessage;
+        ProcessStatus status = new ProcessStatus(ProcessState.FAILED);
+        StringWriter errors = new StringWriter();
 
         if (error == null) {
             errorMessage = "Task " + getTaskId() + " failed due to " + reason;
+            errors.write(errorMessage);
+            status.setReason(errorMessage);
             logger.error(errorMessage);
+
         } else {
             errorMessage = "Task " + getTaskId() + " failed due to " + reason + ", " + error.getMessage();
+            status.setReason(errorMessage);
+            error.printStackTrace(new PrintWriter(errors));
             logger.error(errorMessage, error);
         }
+        status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+        getTaskContext().setProcessStatus(status);
+
+        ErrorModel errorModel = new ErrorModel();
+        errorModel.setUserFriendlyMessage("GFac Worker throws an exception");
+        errorModel.setActualErrorMessage(errors.toString());
+        errorModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
+
+        saveAndPublishProcessStatus();
+        saveExperimentError(errorModel);
+        saveProcessError(errorModel);
         return new TaskResult(fatal ? TaskResult.Status.FATAL_FAILED : TaskResult.Status.FAILED, errorMessage);
+    }
 
+    public void saveAndPublishProcessStatus(ProcessState state) {
+        ProcessStatus processStatus = new ProcessStatus(state);
+        processStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+        getTaskContext().setProcessStatus(processStatus);
+        saveAndPublishProcessStatus();
+    }
+
+    public void saveAndPublishProcessStatus() {
+        try {
+            ProcessStatus status = taskContext.getProcessStatus();
+            if (status.getTimeOfStateChange() == 0 || status.getTimeOfStateChange() > 0 ){
+                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+            }else {
+                status.setTimeOfStateChange(status.getTimeOfStateChange());
+            }
+            experimentCatalog.add(ExpCatChildDataType.PROCESS_STATUS, status, getProcessId());
+            ProcessIdentifier identifier = new ProcessIdentifier(getProcessId(), getExperimentId(), getGatewayId());
+            ProcessStatusChangeEvent processStatusChangeEvent = new ProcessStatusChangeEvent(status.getState(), identifier);
+            MessageContext msgCtx = new MessageContext(processStatusChangeEvent, MessageType.PROCESS,
+                    AiravataUtils.getId(MessageType.PROCESS.name()), getGatewayId());
+            msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
+            getStatusPublisher().publish(msgCtx);
+        } catch (Exception e) {
+            logger.error("Failed to save process status of process " + getProcessId(), e);
+        }
+    }
+
+    public void saveAndPublishTaskStatus() {
+        try {
+            TaskState state = getTaskContext().getTaskState();
+            // save the task status to the registry first, then publish the status change event.
+            TaskStatus status = getTaskContext().getTaskStatus();
+            if (status.getTimeOfStateChange() == 0 || status.getTimeOfStateChange() > 0 ){
+                status.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+            }else {
+                status.setTimeOfStateChange(status.getTimeOfStateChange());
+            }
+            experimentCatalog.add(ExpCatChildDataType.TASK_STATUS, status, getTaskId());
+            TaskIdentifier identifier = new TaskIdentifier(getTaskId(), getProcessId(), getExperimentId(), getGatewayId());
+            TaskStatusChangeEvent taskStatusChangeEvent = new TaskStatusChangeEvent(state,
+                    identifier);
+            MessageContext msgCtx = new MessageContext(taskStatusChangeEvent, MessageType.TASK, AiravataUtils.getId
+                    (MessageType.TASK.name()), getGatewayId());
+            msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
+            getStatusPublisher().publish(msgCtx);
+        } catch (Exception e) {
+            logger.error("Failed to publist task status of task " + getTaskId());
+        }
+    }
+
+    public void saveExperimentError(ErrorModel errorModel) {
+        try {
+            errorModel.setErrorId(AiravataUtils.getId("EXP_ERROR"));
+            getExperimentCatalog().add(ExpCatChildDataType.EXPERIMENT_ERROR, errorModel, experimentId);
+        } catch (RegistryException e) {
+            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId() + " : - Error while updating experiment errors";
+            logger.error(msg, e);
+        }
+    }
+
+    public void saveProcessError(ErrorModel errorModel) {
+        try {
+            errorModel.setErrorId(AiravataUtils.getId("PROCESS_ERROR"));
+            experimentCatalog.add(ExpCatChildDataType.PROCESS_ERROR, errorModel, getProcessId());
+        } catch (RegistryException e) {
+            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId()
+                    + " : - Error while updating process errors";
+            logger.error(msg, e);
+        }
+    }
+
+    public void saveTaskError(ErrorModel errorModel) throws Exception {
+        try {
+            errorModel.setErrorId(AiravataUtils.getId("TASK_ERROR"));
+            getExperimentCatalog().add(ExpCatChildDataType.TASK_ERROR, errorModel, getTaskId());
+        } catch (RegistryException e) {
+            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId() + " taskId: " + getTaskId()
+                    + " : - Error while updating task errors";
+            throw new Exception(msg, e);
+        }
+    }
+
+    public Publisher getStatusPublisher() throws AiravataException {
+        if (statusPublisher == null) {
+            synchronized (RabbitMQPublisher.class) {
+                if (statusPublisher == null) {
+                    statusPublisher = MessagingFactory.getPublisher(Type.STATUS);
+                }
+            }
+        }
+        return statusPublisher;
     }
 
     @Override
@@ -145,10 +253,6 @@ public abstract class AiravataTask extends AbstractTask {
         return experimentCatalog;
     }
 
-    public Publisher getStatusPublisher() {
-        return statusPublisher;
-    }
-
     public String getProcessId() {
         return processId;
     }
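
For illustration, here is a minimal sketch of how a concrete task can use the new status helpers. ExampleTask is a hypothetical subclass written only for this sketch, not part of the commit; the real tasks below follow the same pattern.

    import org.apache.airavata.helix.impl.task.AiravataTask;
    import org.apache.airavata.helix.task.api.TaskHelper;
    import org.apache.airavata.helix.task.api.annotation.TaskDef;
    import org.apache.airavata.model.status.ProcessState;
    import org.apache.helix.task.TaskResult;

    @TaskDef(name = "Example Task")
    public class ExampleTask extends AiravataTask {

        @Override
        public TaskResult onRun(TaskHelper helper) {
            // Persist and publish the EXECUTING process state before doing any real work.
            saveAndPublishProcessStatus(ProcessState.EXECUTING);
            try {
                // ... task specific work goes here ...
                return onSuccess("Example task completed");
            } catch (Exception e) {
                // onFail persists a FAILED process status, records experiment and process
                // errors, and returns a FAILED or FATAL_FAILED TaskResult.
                return onFail("Example task failed", true, e);
            }
        }

        @Override
        public void onCancel() {
        }
    }
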
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java
new file mode 100644
index 0000000..9ec2909
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java
@@ -0,0 +1,26 @@
+package org.apache.airavata.helix.impl.task;
+
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.airavata.model.status.ProcessState;
+import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+@TaskDef(name = "Completing Task")
+public class CompletingTask extends AiravataTask {
+
+    private static final Logger logger = LogManager.getLogger(CompletingTask.class);
+
+    @Override
+    public TaskResult onRun(TaskHelper helper) {
+        logger.info("Process " + getProcessId() + " successfully completed");
+        saveAndPublishProcessStatus(ProcessState.COMPLETED);
+        return onSuccess("Process " + getProcessId() + " successfully completed");
+    }
+
+    @Override
+    public void onCancel() {
+
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
index ddba5f2..abdc1bf 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.airavata.model.status.ProcessState;
 import org.apache.airavata.model.status.TaskState;
 import org.apache.airavata.registry.cpi.RegistryException;
 import org.apache.helix.task.TaskResult;
@@ -17,6 +18,8 @@ public class EnvSetupTask extends AiravataTask {
     @Override
     public TaskResult onRun(TaskHelper taskHelper) {
         try {
+
+            saveAndPublishProcessStatus(ProcessState.CONFIGURING_WORKSPACE);
             publishTaskState(TaskState.EXECUTING);
             AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
                     getTaskContext().getGatewayId(),
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
index 30eeec0..ed143dd 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
@@ -3,20 +3,17 @@ package org.apache.airavata.helix.impl.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.AgentException;
 import org.apache.airavata.agents.api.StorageResourceAdaptor;
-import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
 import org.apache.airavata.model.application.io.InputDataObjectType;
+import org.apache.airavata.model.status.ProcessState;
 import org.apache.airavata.model.task.DataStagingTaskModel;
-import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.apache.commons.io.FileUtils;
 import org.apache.helix.task.TaskResult;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
 import java.io.File;
-import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 
@@ -29,6 +26,8 @@ public class InputDataStagingTask extends DataStagingTask {
     public TaskResult onRun(TaskHelper taskHelper) {
         logger.info("Starting Input Data Staging Task " + getTaskId());
 
+        saveAndPublishProcessStatus(ProcessState.INPUT_DATA_STAGING);
+
         try {
             // Get and validate data staging task model
             DataStagingTaskModel dataStagingTaskModel = getDataStagingTaskModel();
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
index f33523c..ff8fd2e 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
@@ -7,6 +7,7 @@ import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
 import org.apache.airavata.model.application.io.OutputDataObjectType;
+import org.apache.airavata.model.status.ProcessState;
 import org.apache.airavata.model.task.DataStagingTaskModel;
 import org.apache.airavata.registry.cpi.ExpCatChildDataType;
 import org.apache.airavata.registry.cpi.RegistryException;
@@ -29,6 +30,8 @@ public class OutputDataStagingTask extends DataStagingTask {
     public TaskResult onRun(TaskHelper taskHelper) {
 
         logger.info("Starting output data staging task " + getTaskId());
+        saveAndPublishProcessStatus(ProcessState.OUTPUT_DATA_STAGING);
+
         try {
             // Get and validate data staging task model
             DataStagingTaskModel dataStagingTaskModel = getDataStagingTaskModel();
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
index 489a196..6be1d36 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
@@ -21,6 +21,8 @@ import org.apache.airavata.model.process.ProcessModel;
 import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel;
 import org.apache.airavata.model.status.ProcessState;
 import org.apache.airavata.model.status.ProcessStatus;
+import org.apache.airavata.model.status.TaskState;
+import org.apache.airavata.model.status.TaskStatus;
 import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.AppCatalogException;
@@ -385,6 +387,20 @@ public class TaskContext {
             return null;
     }
 
+    public TaskState getTaskState() {
+        if(getCurrentTaskModel().getTaskStatuses() != null)
+            return getCurrentTaskModel().getTaskStatuses().get(0).getState();
+        else
+            return null;
+    }
+
+    public TaskStatus getTaskStatus() {
+        if(getCurrentTaskModel().getTaskStatuses() != null)
+            return getCurrentTaskModel().getTaskStatuses().get(0);
+        else
+            return null;
+    }
+
     public String getComputeResourceId() {
         if (isUseUserCRPref() &&
                 userComputeResourcePreference != null &&
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
index 31b6f30..688f894 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
@@ -5,24 +5,18 @@ import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
-import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
 import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.commons.ErrorModel;
 import org.apache.airavata.model.experiment.ExperimentModel;
 import org.apache.airavata.model.job.JobModel;
-import org.apache.airavata.model.status.JobState;
-import org.apache.airavata.model.status.JobStatus;
-import org.apache.airavata.model.status.TaskState;
-import org.apache.airavata.model.status.TaskStatus;
+import org.apache.airavata.model.status.*;
 import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
-import org.apache.commons.io.FileUtils;
 import org.apache.helix.task.TaskResult;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
-import java.io.File;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -38,6 +32,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
     public TaskResult onRun(TaskHelper taskHelper) {
 
         try {
+            saveAndPublishProcessStatus(ProcessState.EXECUTING);
 
             GroovyMapData mapData = new GroovyMapBuilder(getTaskContext()).build();
 
@@ -133,14 +128,14 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                     jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
                     jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                     jobModel.setJobStatuses(Arrays.asList(jobStatus));
-                    saveJobStatus(jobModel);
+                    saveAndPublishJobStatus(jobModel);
 
                     if (verifyJobSubmissionByJobId(adaptor, jobId)) {
                         jobStatus.setJobState(JobState.QUEUED);
                         jobStatus.setReason("Verification step succeeded");
                         jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                         jobModel.setJobStatuses(Arrays.asList(jobStatus));
-                        saveJobStatus(jobModel);
+                        saveAndPublishJobStatus(jobModel);
                         createMonitoringNode(jobId);
                     }
 
@@ -172,7 +167,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                             jobStatus.setReason("Verification step succeeded");
                             jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                             jobModel.setJobStatuses(Arrays.asList(jobStatus));
-                            saveJobStatus(jobModel);
+                            saveAndPublishJobStatus(jobModel);
                             //taskStatus.setState(TaskState.COMPLETED);
                             //taskStatus.setReason("Submitted job to compute resource");
                             //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
index 2e4a052..e3b5447 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
@@ -58,7 +58,7 @@ public class ForkJobSubmissionTask extends JobSubmissionTask {
                     jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
                     jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                     jobModel.setJobStatuses(Arrays.asList(jobStatus));
-                    saveJobStatus(jobModel);
+                    saveAndPublishJobStatus(jobModel);
 
                     return null;
                 } else {
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
index afa2630..4fed22d 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
@@ -73,6 +73,8 @@ public abstract class JobSubmissionTask extends AiravataTask {
         this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/lock", new byte[0]);
         this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/gateway", getGatewayId().getBytes());
         this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/process", getProcessId().getBytes());
+        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/task", getTaskId().getBytes());
+        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/experiment", getExperimentId().getBytes());
         this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/status", "pending".getBytes());
     }
 
@@ -146,48 +148,11 @@ public abstract class JobSubmissionTask extends AiravataTask {
         return jobManagerConfiguration.getParser().parseJobId(jobName, commandOutput.getStdOut());
     }
 
-    ////////////////////////////////
-
-
-    /////////////////////////////////////////////
-    public void saveExperimentError(ErrorModel errorModel) throws Exception {
-        try {
-            errorModel.setErrorId(AiravataUtils.getId("EXP_ERROR"));
-            getExperimentCatalog().add(ExpCatChildDataType.EXPERIMENT_ERROR, errorModel, getExperimentId());
-        } catch (RegistryException e) {
-            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId()
-                    + " : - Error while updating experiment errors";
-            throw new Exception(msg, e);
-        }
-    }
-
-    public void saveProcessError(ErrorModel errorModel) throws Exception {
-        try {
-            errorModel.setErrorId(AiravataUtils.getId("PROCESS_ERROR"));
-            getExperimentCatalog().add(ExpCatChildDataType.PROCESS_ERROR, errorModel, getProcessId());
-        } catch (RegistryException e) {
-            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId()
-                    + " : - Error while updating process errors";
-            throw new Exception(msg, e);
-        }
-    }
-
-    public void saveTaskError(ErrorModel errorModel) throws Exception {
-        try {
-            errorModel.setErrorId(AiravataUtils.getId("TASK_ERROR"));
-            getExperimentCatalog().add(ExpCatChildDataType.TASK_ERROR, errorModel, getTaskId());
-        } catch (RegistryException e) {
-            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId() + " taskId: " + getTaskId()
-                    + " : - Error while updating task errors";
-            throw new Exception(msg, e);
-        }
-    }
-
     public void saveJobModel(JobModel jobModel) throws RegistryException {
         getExperimentCatalog().add(ExpCatChildDataType.JOB, jobModel, getProcessId());
     }
 
-    public void saveJobStatus(JobModel jobModel) throws Exception {
+    public void saveAndPublishJobStatus(JobModel jobModel) throws Exception {
         try {
             // first we save job jobModel to the registry for sa and then save the job status.
             JobStatus jobStatus = null;
@@ -213,7 +178,7 @@ public abstract class JobSubmissionTask extends AiravataTask {
             MessageContext msgCtx = new MessageContext(jobStatusChangeEvent, MessageType.JOB, AiravataUtils.getId
                     (MessageType.JOB.name()), getGatewayId());
             msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
-            //getStatusPublisher().publish(msgCtx);
+            getStatusPublisher().publish(msgCtx);
         } catch (Exception e) {
             throw new Exception("Error persisting job status " + e.getLocalizedMessage(), e);
         }
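
The two new znodes extend the per-job monitoring record so that the post workflow can recover the task and experiment ids as well. Below is a minimal sketch of reading that record back with Curator; the ZooKeeper address and job id are placeholders.

    import org.apache.curator.RetryPolicy;
    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.ExponentialBackoffRetry;

    public class MonitoringNodeReader {

        public static void main(String[] args) throws Exception {
            RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
            CuratorFramework curatorClient =
                    CuratorFrameworkFactory.newClient("localhost:2181", retryPolicy); // placeholder ZooKeeper address
            curatorClient.start();

            // /monitoring/<jobId> now carries gateway, process, task, experiment and status children.
            String base = "/monitoring/" + "882341"; // placeholder job id
            String gateway    = new String(curatorClient.getData().forPath(base + "/gateway"));
            String process    = new String(curatorClient.getData().forPath(base + "/process"));
            String task       = new String(curatorClient.getData().forPath(base + "/task"));
            String experiment = new String(curatorClient.getData().forPath(base + "/experiment"));
            String status     = new String(curatorClient.getData().forPath(base + "/status"));

            System.out.println(gateway + " " + process + " " + task + " " + experiment + " " + status);
            curatorClient.close();
        }
    }
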
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
index e3ae4fa..cea6750 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
@@ -58,7 +58,7 @@ public class LocalJobSubmissionTask extends JobSubmissionTask {
                 jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                 jobModel.setJobStatuses(Arrays.asList(jobStatus));
 
-                saveJobStatus(jobModel);
+                saveAndPublishJobStatus(jobModel);
 
                 jobModel.setExitCode(submissionOutput.getExitCode());
                 jobModel.setStdErr(submissionOutput.getStdErr());
@@ -69,7 +69,7 @@ public class LocalJobSubmissionTask extends JobSubmissionTask {
                 jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                 jobModel.setJobStatuses(Arrays.asList(jobStatus));
 
-                saveJobStatus(jobModel);
+                saveAndPublishJobStatus(jobModel);
 
                 return null;
             }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
index 383fe37..07a9aee 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
@@ -1,26 +1,33 @@
 package org.apache.airavata.helix.impl.workflow;
 
+import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.helix.core.OutPort;
-import org.apache.airavata.helix.impl.task.AiravataTask;
-import org.apache.airavata.helix.impl.task.EnvSetupTask;
-import org.apache.airavata.helix.impl.task.InputDataStagingTask;
-import org.apache.airavata.helix.impl.task.OutputDataStagingTask;
+import org.apache.airavata.helix.impl.task.*;
 import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;
 import org.apache.airavata.helix.impl.task.submission.task.JobSubmissionTask;
 import org.apache.airavata.helix.workflow.WorkflowManager;
 import org.apache.airavata.job.monitor.kafka.JobStatusResultDeserializer;
 import org.apache.airavata.job.monitor.parser.JobStatusResult;
+import org.apache.airavata.messaging.core.MessageContext;
+import org.apache.airavata.messaging.core.MessagingFactory;
+import org.apache.airavata.messaging.core.Publisher;
+import org.apache.airavata.messaging.core.Type;
+import org.apache.airavata.messaging.core.impl.RabbitMQPublisher;
 import org.apache.airavata.model.experiment.ExperimentModel;
+import org.apache.airavata.model.job.JobModel;
+import org.apache.airavata.model.messaging.event.JobIdentifier;
+import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
+import org.apache.airavata.model.messaging.event.MessageType;
 import org.apache.airavata.model.process.ProcessModel;
 import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
 import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.model.task.TaskTypes;
 import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
-import org.apache.airavata.registry.cpi.AppCatalog;
-import org.apache.airavata.registry.cpi.ExperimentCatalog;
-import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
+import org.apache.airavata.registry.cpi.*;
 import org.apache.curator.RetryPolicy;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
@@ -46,6 +53,7 @@ public class PostWorkflowManager {
     private final String TOPIC = "parsed-data";
 
     private CuratorFramework curatorClient = null;
+    private Publisher statusPublisher;
 
     private void init() throws ApplicationSettingsException {
         RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
@@ -66,6 +74,18 @@ public class PostWorkflowManager {
         return consumer;
     }
 
+    private String getExperimentIdByJobId(String jobId) throws Exception {
+        byte[] experimentBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/experiment");
+        String experimentId = new String(experimentBytes);
+        return experimentId;
+    }
+
+    private String getTaskIdByJobId(String jobId) throws Exception {
+        byte[] taskBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/task");
+        String taskId = new String(taskBytes);
+        return taskId;
+    }
+
     private String getProcessIdByJobId(String jobId) throws Exception {
         byte[] processBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/process");
         String process = new String(processBytes);
@@ -101,6 +121,8 @@ public class PostWorkflowManager {
             if (hasMonitoringRegistered(jobStatusResult.getJobId())) {
                 String gateway = getGatewayByJobId(jobStatusResult.getJobId());
                 String processId = getProcessIdByJobId(jobStatusResult.getJobId());
+                String experimentId = getExperimentIdByJobId(jobStatusResult.getJobId());
+                String task = getTaskIdByJobId(jobStatusResult.getJobId());
                 String status = getStatusByJobId(jobStatusResult.getJobId());
 
                 logger.info("Starting the post workflow for job id : " + jobStatusResult.getJobId() + " with process id "
@@ -111,6 +133,8 @@ public class PostWorkflowManager {
 
                 } else {
 
+                    saveAndPublishJobStatus(jobStatusResult.getJobId(), task, processId, experimentId, gateway, jobStatusResult.getState());
+
                     if (jobStatusResult.getState() == JobState.COMPLETE) {
                         logger.info("Job " + jobStatusResult.getJobId() + " was completed");
 
@@ -151,6 +175,14 @@ public class PostWorkflowManager {
                                 }
                             }
                         }
+
+                        CompletingTask completingTask = new CompletingTask();
+                        completingTask.setGatewayId(experimentModel.getGatewayId());
+                        completingTask.setExperimentId(experimentModel.getExperimentId());
+                        completingTask.setProcessId(processModel.getProcessId());
+                        completingTask.setTaskId("Completing-Task");
+                        allTasks.add(completingTask);
+
                         WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster",
                                 "wm-23", ServerSettings.getZookeeperConnection());
 
@@ -189,6 +221,48 @@ public class PostWorkflowManager {
         }
     }
 
+    public void saveAndPublishJobStatus(String jobId, String taskId, String processId, String experimentId, String gateway,
+                                        JobState jobState) throws Exception {
+        try {
+
+            JobStatus jobStatus = new JobStatus();
+            jobStatus.setReason(jobState.name());
+            jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+            jobStatus.setJobState(jobState);
+
+            if (jobStatus.getTimeOfStateChange() == 0 || jobStatus.getTimeOfStateChange() > 0 ) {
+                jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+            } else {
+                jobStatus.setTimeOfStateChange(jobStatus.getTimeOfStateChange());
+            }
+
+            CompositeIdentifier ids = new CompositeIdentifier(taskId, jobId);
+            ExperimentCatalog experimentCatalog = RegistryFactory.getExperimentCatalog(gateway);
+            experimentCatalog.add(ExpCatChildDataType.JOB_STATUS, jobStatus, ids);
+            JobIdentifier identifier = new JobIdentifier(jobId, taskId,
+                    processId, experimentId, gateway);
+
+            JobStatusChangeEvent jobStatusChangeEvent = new JobStatusChangeEvent(jobStatus.getJobState(), identifier);
+            MessageContext msgCtx = new MessageContext(jobStatusChangeEvent, MessageType.JOB, AiravataUtils.getId
+                    (MessageType.JOB.name()), gateway);
+            msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
+            getStatusPublisher().publish(msgCtx);
+        } catch (Exception e) {
+            throw new Exception("Error persisting job status " + e.getLocalizedMessage(), e);
+        }
+    }
+
+    public Publisher getStatusPublisher() throws AiravataException {
+        if (statusPublisher == null) {
+            synchronized (RabbitMQPublisher.class) {
+                if (statusPublisher == null) {
+                    statusPublisher = MessagingFactory.getPublisher(Type.STATUS);
+                }
+            }
+        }
+        return statusPublisher;
+    }
+
     public static void main(String[] args) throws Exception {
 
         PostWorkflowManager postManager = new PostWorkflowManager();
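
getStatusPublisher() here repeats the lazy-initialization idiom added to AiravataTask in this commit. A generic sketch of the same idiom follows, written with a volatile field and a per-instance lock; the commit itself synchronizes on RabbitMQPublisher.class and leaves the field non-volatile, and without volatile the classic double-checked locking idiom is not guaranteed by the Java memory model.

    import org.apache.airavata.common.exception.AiravataException;
    import org.apache.airavata.messaging.core.MessagingFactory;
    import org.apache.airavata.messaging.core.Publisher;
    import org.apache.airavata.messaging.core.Type;

    // Hypothetical holder class used only to illustrate the pattern.
    public class StatusPublisherHolder {

        private volatile Publisher statusPublisher;

        public Publisher getStatusPublisher() throws AiravataException {
            // Double-checked locking: the volatile field plus the second null check
            // inside the synchronized block ensures a single publisher is created.
            if (statusPublisher == null) {
                synchronized (this) {
                    if (statusPublisher == null) {
                        statusPublisher = MessagingFactory.getPublisher(Type.STATUS);
                    }
                }
            }
            return statusPublisher;
        }
    }
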
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/MessageProducer.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/MessageProducer.java
index 748a533..9f6d7b8 100644
--- a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/MessageProducer.java
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/kafka/MessageProducer.java
@@ -32,5 +32,6 @@ public class MessageProducer {
     public void submitMessageToQueue(JobStatusResult jobStatusResult) throws ExecutionException, InterruptedException {
         final ProducerRecord<String, JobStatusResult> record = new ProducerRecord<>(TOPIC, jobStatusResult);
         RecordMetadata recordMetadata = producer.send(record).get();
+        producer.flush();
     }
 }
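
The added flush() is a safety net: send(record).get() already blocks until the broker acknowledges the record, so flushing afterwards mainly makes the intent explicit. A stand-alone sketch of the same producer pattern against the parsed-data topic; the broker address is a placeholder and String serializers stand in for the module's JobStatusResult serializer.

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.clients.producer.RecordMetadata;
    import org.apache.kafka.common.serialization.StringSerializer;

    import java.util.Properties;

    public class ParsedDataProducerExample {

        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                ProducerRecord<String, String> record =
                        new ProducerRecord<>("parsed-data", "882341", "COMPLETE"); // placeholder key and value

                // send() returns a Future; get() waits for the broker acknowledgement.
                RecordMetadata metadata = producer.send(record).get();
                System.out.println("Wrote to partition " + metadata.partition() + " at offset " + metadata.offset());
                producer.flush();
            }
        }
    }
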


[airavata] 01/17: Initial helix migration

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit c9a1b064bb71dd936bb7f480bf4d7703ba7ad9a0
Author: dimuthu <di...@gmail.com>
AuthorDate: Wed Feb 21 13:31:29 2018 -0500

    Initial helix migration
---
 modules/airavata-helix/agent-api/pom.xml           |  47 +++
 .../apache/airavata/agents/api/AdaptorParams.java  |  26 ++
 .../java/org/apache/airavata/agents/api/Agent.java |  10 +
 .../apache/airavata/agents/api/AgentAdaptor.java   |  23 ++
 .../apache/airavata/agents/api/AgentException.java |  30 ++
 .../org/apache/airavata/agents/api/AgentStore.java | 103 +++++
 .../apache/airavata/agents/api/CommandOutput.java  |  16 +
 .../airavata/agents/api/JobSubmissionOutput.java   |  74 ++++
 modules/airavata-helix/agent-impl/pom.xml          |  27 ++
 .../airavata-helix/agent-impl/ssh-agent/pom.xml    |  76 ++++
 .../helix/agent/local/LocalAgentAdaptor.java       |  43 +++
 .../airavata/helix/agent/ssh/SshAdaptorParams.java | 116 ++++++
 .../airavata/helix/agent/ssh/SshAgentAdaptor.java  | 430 +++++++++++++++++++++
 .../helix/agent/ssh/StandardOutReader.java         |  83 ++++
 modules/airavata-helix/pom.xml                     |  24 ++
 modules/airavata-helix/task-api/pom.xml            |  41 ++
 .../apache/airavata/helix/task/api/TaskHelper.java |  13 +
 .../helix/task/api/annotation/TaskDef.java         |  18 +
 .../helix/task/api/annotation/TaskOutPort.java     |  18 +
 .../helix/task/api/annotation/TaskParam.java       |  20 +
 .../helix/task/api/support/AdaptorSupport.java     |  52 +++
 .../src/main/resources/application.properties      |   3 +
 .../task-api/src/main/resources/log4j.properties   |   9 +
 modules/airavata-helix/task-core/pom.xml           |  47 +++
 .../apache/airavata/helix/core/AbstractTask.java   | 108 ++++++
 .../org/apache/airavata/helix/core/OutPort.java    |  44 +++
 .../helix/core/controller/HelixController.java     |  91 +++++
 .../helix/core/participant/HelixParticipant.java   | 171 ++++++++
 .../helix/core/support/AdaptorSupportImpl.java     |  47 +++
 .../helix/core/support/TaskHelperImpl.java         |  16 +
 .../airavata/helix/core/util/PropertyResolver.java |  44 +++
 .../apache/airavata/helix/core/util/TaskUtil.java  | 103 +++++
 modules/airavata-helix/workflow-impl/pom.xml       |  44 +++
 .../airavata/helix/workflow/SimpleWorkflow.java    |  40 ++
 .../airavata/helix/workflow/WorkflowManager.java   |  94 +++++
 modules/helix-spectator/pom.xml                    |  50 +++
 .../helix/impl/participant/GlobalParticipant.java  |  68 ++++
 .../airavata/helix/impl/task/AiravataTask.java     | 293 ++++++++++++++
 .../airavata/helix/impl/task/DataStagingTask.java  |  19 +
 .../airavata/helix/impl/task/EnvSetupTask.java     |  64 +++
 .../helix/impl/task/submission/GroovyMapData.java  | 415 ++++++++++++++++++++
 .../helix/impl/task/submission/Script.java         |  43 +++
 .../helix/impl/task/submission/ScriptTag.java      |  13 +
 .../helix/impl/task/submission/SubmissionUtil.java |  10 +
 .../impl/task/submission/config/JobFactory.java    | 102 +++++
 .../submission/config/JobManagerConfiguration.java |  29 ++
 .../impl/task/submission/config/OutputParser.java  |  41 ++
 .../task/submission/config/RawCommandInfo.java     |  22 ++
 .../config/imp/ForkJobConfiguration.java           | 113 ++++++
 .../impl/task/submission/config/imp/JobUtil.java   |  58 +++
 .../submission/config/imp/LSFJobConfiguration.java | 120 ++++++
 .../submission/config/imp/PBSJobConfiguration.java | 122 ++++++
 .../config/imp/SlurmJobConfiguration.java          | 117 ++++++
 .../submission/config/imp/UGEJobConfiguration.java | 117 ++++++
 .../parser/AiravataCustomCommandOutputParser.java  |  56 +++
 .../config/imp/parser/ForkOutputParser.java        |  58 +++
 .../config/imp/parser/LSFOutputParser.java         | 132 +++++++
 .../config/imp/parser/PBSOutputParser.java         | 142 +++++++
 .../config/imp/parser/SlurmOutputParser.java       | 137 +++++++
 .../config/imp/parser/UGEOutputParser.java         | 108 ++++++
 .../submission/task/DefaultJobSubmissionTask.java  | 232 +++++++++++
 .../submission/task/ForkJobSubmissionTask.java     |  79 ++++
 .../task/submission/task/JobSubmissionTask.java    | 202 ++++++++++
 .../submission/task/LocalJobSubmissionTask.java    |  81 ++++
 .../helix/impl/workflow/SimpleWorkflow.java        |  31 ++
 .../src/main/resources/airavata-server.properties  | 334 ++++++++++++++++
 .../src/main/resources/application.properties      |   3 +
 .../src/main/resources/log4j.properties            |  11 +
 pom.xml                                            |   2 +
 69 files changed, 5575 insertions(+)

diff --git a/modules/airavata-helix/agent-api/pom.xml b/modules/airavata-helix/agent-api/pom.xml
new file mode 100644
index 0000000..02ee48a
--- /dev/null
+++ b/modules/airavata-helix/agent-api/pom.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata-helix</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>agent-api</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-mapper-asl</artifactId>
+            <version>1.9.13</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-registry-cpi</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-registry-core</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+    <!--<build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.5.1</version>
+                <configuration>
+                    <source>${java.version}</source>
+                    <target>${java.version}</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>-->
+
+</project>
\ No newline at end of file
diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AdaptorParams.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AdaptorParams.java
new file mode 100644
index 0000000..ca6f80a
--- /dev/null
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AdaptorParams.java
@@ -0,0 +1,26 @@
+package org.apache.airavata.agents.api;
+
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class AdaptorParams {
+
+    public Object loadFromFile(File file) throws IOException {
+        ObjectMapper mapper = new ObjectMapper();
+        return mapper.readValue(file, this.getClass());
+    }
+
+    public void writeToFile(File file) throws IOException {
+        ObjectMapper mapper = new ObjectMapper();
+        mapper.writeValue(file, this);
+    }
+}
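
AdaptorParams gives every adaptor parameter class a JSON round trip via Jackson. A small sketch of how a subclass can be persisted and reloaded follows; ExampleAdaptorParams and the file path are made up for the sketch, the real subclass in this branch is SshAdaptorParams.

    import org.apache.airavata.agents.api.AdaptorParams;

    import java.io.File;
    import java.io.IOException;

    public class ExampleAdaptorParams extends AdaptorParams {

        private String hostName;
        private int port;

        // Jackson maps JSON properties through these accessors.
        public String getHostName() { return hostName; }
        public void setHostName(String hostName) { this.hostName = hostName; }
        public int getPort() { return port; }
        public void setPort(int port) { this.port = port; }

        public static void main(String[] args) throws IOException {
            File paramFile = new File("/tmp/example-param.json"); // placeholder path

            ExampleAdaptorParams params = new ExampleAdaptorParams();
            params.setHostName("example.compute.resource");
            params.setPort(22);
            params.writeToFile(paramFile);

            // loadFromFile deserializes into the runtime class of the receiver.
            ExampleAdaptorParams loaded =
                    (ExampleAdaptorParams) new ExampleAdaptorParams().loadFromFile(paramFile);
            System.out.println(loaded.getHostName() + ":" + loaded.getPort());
        }
    }
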
diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/Agent.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/Agent.java
new file mode 100644
index 0000000..f48aa3e
--- /dev/null
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/Agent.java
@@ -0,0 +1,10 @@
+package org.apache.airavata.agents.api;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class Agent {
+}
diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentAdaptor.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentAdaptor.java
new file mode 100644
index 0000000..2d295de
--- /dev/null
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentAdaptor.java
@@ -0,0 +1,23 @@
+package org.apache.airavata.agents.api;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public interface AgentAdaptor {
+
+    public void init(String computeResource, String gatewayId, String userId, String token) throws AgentException;
+
+    public CommandOutput executeCommand(String command, String workingDirectory) throws AgentException;
+
+    public void createDirectory(String path) throws AgentException;
+
+    public void copyFile(String sourceFile, String destinationFile) throws AgentException;
+
+    public List<String> listDirectory(String path) throws AgentException;
+}
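
A short usage sketch of the adaptor contract defined above; the resource, gateway, user and token values are placeholders, and the adaptor instance would normally come from an AgentStore or the task-level AdaptorSupport.

    import org.apache.airavata.agents.api.AgentAdaptor;
    import org.apache.airavata.agents.api.AgentException;
    import org.apache.airavata.agents.api.CommandOutput;

    public class AgentAdaptorExample {

        public static void runRemoteCommand(AgentAdaptor adaptor) throws AgentException {
            adaptor.init("example-compute-resource", "example-gateway", "example-user", "credential-token");

            adaptor.createDirectory("/tmp/airavata-demo");
            CommandOutput output = adaptor.executeCommand("hostname", "/tmp/airavata-demo");

            if (output.getExitCode() != null && output.getExitCode() == 0) {
                System.out.println("stdout: " + output.getStdOut());
            } else {
                System.err.println("stderr: " + output.getStdError());
            }
        }
    }
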
diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentException.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentException.java
new file mode 100644
index 0000000..9dfe50e
--- /dev/null
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentException.java
@@ -0,0 +1,30 @@
+package org.apache.airavata.agents.api;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class AgentException extends Exception {
+
+    public AgentException() {
+        super();
+    }
+
+    public AgentException(String message) {
+        super(message);
+    }
+
+    public AgentException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+    public AgentException(Throwable cause) {
+        super(cause);
+    }
+
+    protected AgentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+        super(message, cause, enableSuppression, writableStackTrace);
+    }
+}
diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentStore.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentStore.java
new file mode 100644
index 0000000..78f2276
--- /dev/null
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/AgentStore.java
@@ -0,0 +1,103 @@
+package org.apache.airavata.agents.api;
+
+import java.io.*;
+import java.net.URL;
+import java.net.URLClassLoader;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class AgentStore {
+    public AgentAdaptor fetchAdaptor(String computeResource, String Protocol, String authToken) throws AgentException {
+
+        AgentData agentData = getAgentDataForComputeResource(computeResource);
+
+        try {
+            URL[] urls = new URL[1];
+            urls[0] = new URL(agentData.getLibraryLocation());
+            URLClassLoader classLoader = new URLClassLoader(urls, AgentAdaptor.class.getClassLoader());
+
+            Class<?> clazz = classLoader.loadClass(agentData.getAdaptorClass());
+            AgentAdaptor agentAdaptor = (AgentAdaptor) clazz.newInstance();
+
+            Class<?> paramClazz = classLoader.loadClass(agentData.paramClass);
+            AdaptorParams adaptorParams = (AdaptorParams) paramClazz.newInstance();
+
+            Object paramsInit = adaptorParams.loadFromFile(new File(agentData.paramDataFile));
+
+            //agentAdaptor.init(paramsInit);
+            System.out.println("Done");
+
+            return agentAdaptor;
+
+        } catch (IllegalAccessException | ClassNotFoundException | InstantiationException | IOException e) {
+            e.printStackTrace();
+            throw new AgentException("Failed to fetch agent adaptor for compute resource " + computeResource, e);
+        }
+    }
+
+    public static void main(String args[]) throws InstantiationException, IOException, AgentException {
+        AgentStore store = new AgentStore();
+
+        AgentAdaptor agentAdaptor = store.fetchAdaptor("localhost", null, null);
+        System.out.println("Agent loaded");
+    }
+
+    private AgentData getAgentDataForComputeResource(String computeResource) {
+        if ("localhost".equals(computeResource)) {
+            return new AgentData().setLibraryLocation("file:///Users/dimuthu/code/fork/airavata-sandbox/airavata-helix/modules/agent-impl/ssh-agent/target/ssh-agent-1.0-SNAPSHOT-jar-with-dependencies.jar")
+                    .setAdaptorClass("org.apache.airavata.helix.agent.ssh.SshAgentAdaptor")
+                    .setParamClass("org.apache.airavata.helix.agent.ssh.SshAdaptorParams")
+                    .setParamDataFile("/tmp/ssh-param.json");
+        }
+
+        return null;
+    }
+
+    public static class AgentData {
+
+        private String libraryLocation;
+        private String adaptorClass;
+        private String paramClass;
+        private String paramDataFile;
+
+        public String getLibraryLocation() {
+            return libraryLocation;
+        }
+
+        public AgentData setLibraryLocation(String libraryLocation) {
+            this.libraryLocation = libraryLocation;
+            return this;
+        }
+
+        public String getAdaptorClass() {
+            return adaptorClass;
+        }
+
+        public AgentData setAdaptorClass(String adaptorClass) {
+            this.adaptorClass = adaptorClass;
+            return this;
+        }
+
+        public String getParamClass() {
+            return paramClass;
+        }
+
+        public AgentData setParamClass(String paramClass) {
+            this.paramClass = paramClass;
+            return this;
+        }
+
+        public String getParamDataFile() {
+            return paramDataFile;
+        }
+
+        public AgentData setParamDataFile(String paramDataFile) {
+            this.paramDataFile = paramDataFile;
+            return this;
+        }
+    }
+}
diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/CommandOutput.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/CommandOutput.java
new file mode 100644
index 0000000..94a0118
--- /dev/null
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/CommandOutput.java
@@ -0,0 +1,16 @@
+package org.apache.airavata.agents.api;
+
+import java.io.OutputStream;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public interface CommandOutput {
+
+       String getStdOut();
+       String getStdError();
+       Integer getExitCode();
+}
diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/JobSubmissionOutput.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/JobSubmissionOutput.java
new file mode 100644
index 0000000..1858826
--- /dev/null
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/JobSubmissionOutput.java
@@ -0,0 +1,74 @@
+package org.apache.airavata.agents.api;
+
+public class JobSubmissionOutput {
+    private int exitCode = Integer.MIN_VALUE;
+    private String stdOut;
+    private String stdErr;
+    private String command;
+    private String jobId;
+    private boolean isJobSubmissionFailed;
+    private String failureReason;
+
+    public int getExitCode() {
+        return exitCode;
+    }
+
+    public JobSubmissionOutput setExitCode(int exitCode) {
+        this.exitCode = exitCode;
+        return this;
+    }
+
+    public String getStdOut() {
+        return stdOut;
+    }
+
+    public JobSubmissionOutput setStdOut(String stdOut) {
+        this.stdOut = stdOut;
+        return this;
+    }
+
+    public String getStdErr() {
+        return stdErr;
+    }
+
+    public JobSubmissionOutput setStdErr(String stdErr) {
+        this.stdErr = stdErr;
+        return this;
+    }
+
+    public String getCommand() {
+        return command;
+    }
+
+    public JobSubmissionOutput setCommand(String command) {
+        this.command = command;
+        return this;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public JobSubmissionOutput setJobId(String jobId) {
+        this.jobId = jobId;
+        return this;
+    }
+
+    public boolean isJobSubmissionFailed() {
+        return isJobSubmissionFailed;
+    }
+
+    public JobSubmissionOutput setJobSubmissionFailed(boolean jobSubmissionFailed) {
+        isJobSubmissionFailed = jobSubmissionFailed;
+        return this;
+    }
+
+    public String getFailureReason() {
+        return failureReason;
+    }
+
+    public JobSubmissionOutput setFailureReason(String failureReason) {
+        this.failureReason = failureReason;
+        return this;
+    }
+}
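
The setters above all return this, so a submission result can be assembled fluently. A brief usage sketch with placeholder values:

    import org.apache.airavata.agents.api.JobSubmissionOutput;

    public class JobSubmissionOutputExample {

        public static void main(String[] args) {
            JobSubmissionOutput output = new JobSubmissionOutput()
                    .setJobId("882341")                      // placeholder job id
                    .setExitCode(0)
                    .setStdOut("Submitted batch job 882341") // placeholder scheduler response
                    .setStdErr("")
                    .setCommand("sbatch /tmp/job.slurm")     // placeholder command
                    .setJobSubmissionFailed(false);

            System.out.println("Submitted job " + output.getJobId()
                    + " with exit code " + output.getExitCode());
        }
    }
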
diff --git a/modules/airavata-helix/agent-impl/pom.xml b/modules/airavata-helix/agent-impl/pom.xml
new file mode 100644
index 0000000..57f0c08
--- /dev/null
+++ b/modules/airavata-helix/agent-impl/pom.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata-helix</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>agent-impl</artifactId>
+    <packaging>pom</packaging>
+    <modules>
+        <module>ssh-agent</module>
+    </modules>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>agent-api</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/pom.xml b/modules/airavata-helix/agent-impl/ssh-agent/pom.xml
new file mode 100644
index 0000000..44cf919
--- /dev/null
+++ b/modules/airavata-helix/agent-impl/ssh-agent/pom.xml
@@ -0,0 +1,76 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>agent-impl</artifactId>
+        <groupId>org.apache</groupId>
+        <version>1.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>ssh-agent</artifactId>
+
+
+    <dependencies>
+        <dependency>
+            <groupId>com.jcraft</groupId>
+            <artifactId>jsch</artifactId>
+            <version>0.1.53</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-registry-cpi</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-registry-core</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-credential-store</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.5.1</version>
+                <configuration>
+                    <source>${java.version}</source>
+                    <target>${java.version}</target>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.4.1</version>
+                <configuration>
+                    <!-- get all project dependencies -->
+                    <descriptorRefs>
+                        <descriptorRef>jar-with-dependencies</descriptorRef>
+                    </descriptorRefs>
+                    <!-- MainClass in manifest makes an executable jar -->
+
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <!-- bind to the packaging phase -->
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
\ No newline at end of file
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/local/LocalAgentAdaptor.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/local/LocalAgentAdaptor.java
new file mode 100644
index 0000000..af507bf
--- /dev/null
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/local/LocalAgentAdaptor.java
@@ -0,0 +1,43 @@
+package org.apache.airavata.helix.agent.local;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.AgentException;
+import org.apache.airavata.agents.api.CommandOutput;
+import org.apache.airavata.agents.api.JobSubmissionOutput;
+
+import java.io.File;
+import java.util.List;
+
+public class LocalAgentAdaptor implements AgentAdaptor {
+
+
+
+    public void init(Object agentParams) throws AgentException {
+
+    }
+
+    @Override
+    public void init(String computeResource, String gatewayId, String userId, String token) throws AgentException {
+
+    }
+
+    @Override
+    public CommandOutput executeCommand(String command, String workingDirectory) throws AgentException {
+        return null;
+    }
+
+    @Override
+    public void createDirectory(String path) throws AgentException {
+
+    }
+
+    @Override
+    public void copyFile(String sourceFile, String destinationFile) throws AgentException {
+
+    }
+
+    @Override
+    public List<String> listDirectory(String path) throws AgentException {
+        return null;
+    }
+}
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAdaptorParams.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAdaptorParams.java
new file mode 100644
index 0000000..f54ae60
--- /dev/null
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAdaptorParams.java
@@ -0,0 +1,116 @@
+package org.apache.airavata.helix.agent.ssh;
+
+import org.apache.airavata.agents.api.AdaptorParams;
+
+import java.io.*;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class SshAdaptorParams extends AdaptorParams implements Serializable {
+
+    private int port = 22;
+    private String hostName;
+    private String userName;
+
+    private String password;
+
+    private byte[] publicKey;
+    private byte[] privateKey;
+    private String passphrase;
+
+    private String knownHostsFilePath;
+    private boolean strictHostKeyChecking;
+
+    public int getPort() {
+        return port;
+    }
+
+    public SshAdaptorParams setPort(int port) {
+        this.port = port;
+        return this;
+    }
+
+    public String getHostName() {
+        return hostName;
+    }
+
+    public SshAdaptorParams setHostName(String hostName) {
+        this.hostName = hostName;
+        return this;
+    }
+
+    public String getUserName() {
+        return userName;
+    }
+
+    public SshAdaptorParams setUserName(String userName) {
+        this.userName = userName;
+        return this;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public SshAdaptorParams setPassword(String password) {
+        this.password = password;
+        return this;
+    }
+
+    public byte[] getPublicKey() {
+        return publicKey;
+    }
+
+    public SshAdaptorParams setPublicKey(byte[] publicKey) {
+        this.publicKey = publicKey;
+        return this;
+    }
+
+    public byte[] getPrivateKey() {
+        return privateKey;
+    }
+
+    public SshAdaptorParams setPrivateKey(byte[] privateKey) {
+        this.privateKey = privateKey;
+        return this;
+    }
+
+    public String getPassphrase() {
+        return passphrase;
+    }
+
+    public SshAdaptorParams setPassphrase(String passphrase) {
+        this.passphrase = passphrase;
+        return this;
+    }
+
+    public String getKnownHostsFilePath() {
+        return knownHostsFilePath;
+    }
+
+    public SshAdaptorParams setKnownHostsFilePath(String knownHostsFilePath) {
+        this.knownHostsFilePath = knownHostsFilePath;
+        return this;
+    }
+
+    public boolean isStrictHostKeyChecking() {
+        return strictHostKeyChecking;
+    }
+
+    public SshAdaptorParams setStrictHostKeyChecking(boolean strictHostKeyChecking) {
+        this.strictHostKeyChecking = strictHostKeyChecking;
+        return this;
+    }
+
+    public static void main(String args[]) throws IOException {
+        SshAdaptorParams params = new SshAdaptorParams();
+        params.setUserName("dimuthu");
+        params.setPassword("upe");
+        params.setHostName("localhost");
+        params.writeToFile(new File("/tmp/ssh-param.json"));
+    }
+}
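
For illustration only (not part of this change), a minimal sketch of how these builder-style parameters are meant to feed the SshAgentAdaptor defined later in this diff; the host name, user name and password are placeholders:

    import org.apache.airavata.agents.api.AgentException;
    import org.apache.airavata.agents.api.CommandOutput;
    import org.apache.airavata.helix.agent.ssh.SshAdaptorParams;
    import org.apache.airavata.helix.agent.ssh.SshAgentAdaptor;

    public class SshAdaptorParamsExample {
        public static void main(String[] args) throws AgentException {
            // Placeholder connection details; replace with a reachable SSH host.
            SshAdaptorParams params = new SshAdaptorParams()
                    .setHostName("example.host.org")
                    .setUserName("testuser")
                    .setPassword("secret")
                    .setStrictHostKeyChecking(false);

            SshAgentAdaptor adaptor = new SshAgentAdaptor();
            adaptor.init(params);                              // opens the JSch session
            CommandOutput out = adaptor.executeCommand("hostname", null);
            System.out.println(out.getStdOut());
        }
    }
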
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
new file mode 100644
index 0000000..19b429c
--- /dev/null
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
@@ -0,0 +1,430 @@
+package org.apache.airavata.helix.agent.ssh;
+
+import com.jcraft.jsch.*;
+import org.apache.airavata.agents.api.*;
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.DBUtil;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.credential.store.credential.Credential;
+import org.apache.airavata.credential.store.credential.impl.ssh.SSHCredential;
+import org.apache.airavata.credential.store.store.CredentialStoreException;
+import org.apache.airavata.credential.store.store.impl.CredentialReaderImpl;
+import org.apache.airavata.model.appcatalog.computeresource.*;
+import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
+import org.apache.airavata.registry.cpi.AppCatalog;
+import org.apache.airavata.registry.cpi.AppCatalogException;
+import org.apache.airavata.registry.cpi.ComputeResource;
+
+import java.io.*;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class SshAgentAdaptor implements AgentAdaptor {
+
+    private Session session = null;
+    private AppCatalog appCatalog;
+    private ComputeResourceDescription computeResourceDescription;
+    private ResourceJobManager resourceJobManager;
+    private SSHJobSubmission sshJobSubmission;
+
+    public void init(AdaptorParams adaptorParams) throws AgentException {
+
+        if (adaptorParams instanceof SshAdaptorParams) {
+            SshAdaptorParams params = SshAdaptorParams.class.cast(adaptorParams);
+            JSch jSch = new JSch();
+            try {
+
+                if (params.getPassword() != null) {
+                    this.session = jSch.getSession(params.getUserName(), params.getHostName(), params.getPort());
+                    session.setPassword(params.getPassword());
+                    session.setUserInfo(new SftpUserInfo(params.getPassword()));
+                } else {
+                    jSch.addIdentity(UUID.randomUUID().toString(), params.getPrivateKey(), params.getPublicKey(),
+                            params.getPassphrase().getBytes());
+                    this.session = jSch.getSession(params.getUserName(), params.getHostName(),
+                            params.getPort());
+                    session.setUserInfo(new DefaultUserInfo(params.getUserName(), null, params.getPassphrase()));
+                }
+
+                if (params.isStrictHostKeyChecking()) {
+                    jSch.setKnownHosts(params.getKnownHostsFilePath());
+                } else {
+                    session.setConfig("StrictHostKeyChecking", "no");
+                }
+                session.connect(); // 0 connection timeout
+
+            } catch (JSchException e) {
+                throw new AgentException("Could not create ssh session for host " + params.getHostName(), e);
+            }
+        } else {
+            throw new AgentException("Unknown parameter type to ssh initialize agent adaptor. Required SshAdaptorParams type");
+        }
+
+    }
+
+    @Override
+    public void init(String computeResourceId, String gatewayId, String userId, String token) throws AgentException {
+        try {
+            this.appCatalog = RegistryFactory.getAppCatalog();
+            this.computeResourceDescription = this.appCatalog.getComputeResource().getComputeResource(computeResourceId);
+            List<JobSubmissionInterface> jobSubmissionInterfaces = this.computeResourceDescription.getJobSubmissionInterfaces();
+            Optional<JobSubmissionInterface> jobSubmissionInterfaceOp = jobSubmissionInterfaces.stream()
+                    .filter(iface -> JobSubmissionProtocol.SSH == iface.getJobSubmissionProtocol() ||
+                            JobSubmissionProtocol.SSH_FORK == iface.getJobSubmissionProtocol())
+                    .findFirst();
+
+            JobSubmissionInterface jobSubmissionInterface = jobSubmissionInterfaceOp.orElseThrow(() -> new AgentException("Could not find a Job submission interface with SSH"));
+
+            this.sshJobSubmission = this.appCatalog.getComputeResource().getSSHJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
+            this.resourceJobManager = sshJobSubmission.getResourceJobManager();
+
+            String jdbcUrl = ServerSettings.getCredentialStoreDBURL();
+            String jdbcUsr = ServerSettings.getCredentialStoreDBUser();
+            String jdbcPass = ServerSettings.getCredentialStoreDBPassword();
+            String driver = ServerSettings.getCredentialStoreDBDriver();
+            CredentialReaderImpl credentialReader = new CredentialReaderImpl(new DBUtil(jdbcUrl, jdbcUsr, jdbcPass, driver));
+            Credential credential = credentialReader.getCredential(gatewayId, token);
+
+            if (credential instanceof SSHCredential) {
+                SSHCredential sshCredential = SSHCredential.class.cast(credential);
+                SshAdaptorParams adaptorParams = new SshAdaptorParams();
+                adaptorParams.setHostName(this.computeResourceDescription.getHostName());
+                adaptorParams.setUserName(userId);
+                adaptorParams.setPassphrase(sshCredential.getPassphrase());
+                adaptorParams.setPrivateKey(sshCredential.getPrivateKey());
+                adaptorParams.setPublicKey(sshCredential.getPublicKey());
+                adaptorParams.setStrictHostKeyChecking(false);
+                init(adaptorParams);
+            }
+
+        } catch (AppCatalogException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        } catch (ApplicationSettingsException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        } catch (IllegalAccessException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        } catch (InstantiationException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        } catch (ClassNotFoundException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        } catch (CredentialStoreException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        }
+    }
+
+    public CommandOutput executeCommand(String command, String workingDirectory) throws AgentException {
+        StandardOutReader commandOutput = new StandardOutReader();
+        try {
+            ChannelExec channelExec = ((ChannelExec) session.openChannel("exec"));
+            channelExec.setCommand(command);
+            channelExec.setInputStream(null);
+            channelExec.setErrStream(commandOutput.getStandardError());
+            channelExec.connect();
+            commandOutput.onOutput(channelExec);
+            return commandOutput;
+        } catch (JSchException e) {
+            throw new AgentException(e);
+        }
+    }
+
+    public void createDirectory(String path) throws AgentException {
+        try {
+            String command = "mkdir -p " + path;
+            Channel channel = session.openChannel("exec");
+            StandardOutReader stdOutReader = new StandardOutReader();
+
+            ((ChannelExec) channel).setCommand(command);
+
+            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
+            try {
+                channel.connect();
+            } catch (JSchException e) {
+
+                channel.disconnect();
+                System.out.println("Unable to retrieve command output. Command - " + command +
+                        " on server - " + session.getHost() + ":" + session.getPort() +
+                        " connecting user name - "
+                        + session.getUserName());
+                throw new AgentException(e);
+            }
+            stdOutReader.onOutput(channel);
+            if (stdOutReader.getStdErrorString().contains("mkdir:")) {
+                throw new AgentException(stdOutReader.getStdErrorString());
+            }
+
+            channel.disconnect();
+        } catch (JSchException e) {
+            throw new AgentException(e);
+        }
+    }
+
+    public void copyFile(String localFile, String remoteFile) throws AgentException {
+        FileInputStream fis = null;
+        String prefix = null;
+        if (new File(localFile).isDirectory()) {
+            prefix = localFile + File.separator;
+        }
+        boolean ptimestamp = true;
+
+        try {
+            // exec 'scp -t rfile' remotely
+            String command = "scp " + (ptimestamp ? "-p" : "") + " -t " + remoteFile;
+            Channel channel = session.openChannel("exec");
+
+            StandardOutReader stdOutReader = new StandardOutReader();
+            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
+            ((ChannelExec) channel).setCommand(command);
+
+            // get I/O streams for remote scp
+            OutputStream out = channel.getOutputStream();
+            InputStream in = channel.getInputStream();
+
+            channel.connect();
+
+            if (checkAck(in) != 0) {
+                String error = "Error Reading input Stream";
+                //log.error(error);
+                throw new AgentException(error);
+            }
+
+            File _lfile = new File(localFile);
+
+            if (ptimestamp) {
+                command = "T" + (_lfile.lastModified() / 1000) + " 0";
+                // The access time should be sent here,
+                // but it is not accessible with JavaAPI ;-<
+                command += (" " + (_lfile.lastModified() / 1000) + " 0\n");
+                out.write(command.getBytes());
+                out.flush();
+                if (checkAck(in) != 0) {
+                    String error = "Error Reading input Stream";
+                    throw new AgentException(error);
+                }
+            }
+
+            // send "C0644 filesize filename", where filename should not include '/'
+            long filesize = _lfile.length();
+            command = "C0644 " + filesize + " ";
+            if (localFile.lastIndexOf('/') > 0) {
+                command += localFile.substring(localFile.lastIndexOf('/') + 1);
+            } else {
+                command += localFile;
+            }
+            command += "\n";
+            out.write(command.getBytes());
+            out.flush();
+            if (checkAck(in) != 0) {
+                String error = "Error Reading input Stream";
+                //log.error(error);
+                throw new AgentException(error);
+            }
+
+            // send the content of localFile
+            fis = new FileInputStream(localFile);
+            byte[] buf = new byte[1024];
+            while (true) {
+                int len = fis.read(buf, 0, buf.length);
+                if (len <= 0) break;
+                out.write(buf, 0, len); //out.flush();
+            }
+            fis.close();
+            fis = null;
+            // send '\0'
+            buf[0] = 0;
+            out.write(buf, 0, 1);
+            out.flush();
+            if (checkAck(in) != 0) {
+                String error = "Error Reading input Stream";
+                //log.error(error);
+                throw new AgentException(error);
+            }
+            out.close();
+            stdOutReader.onOutput(channel);
+
+
+            channel.disconnect();
+            if (stdOutReader.getStdErrorString().contains("scp:")) {
+                throw new AgentException(stdOutReader.getStdErrorString());
+            }
+            //since remote file is always a file  we just return the file
+            //return remoteFile;
+        } catch (JSchException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        }
+    }
+
+    @Override
+    public List<String> listDirectory(String path) throws AgentException {
+
+        try {
+            String command = "ls " + path;
+            Channel channel = session.openChannel("exec");
+            StandardOutReader stdOutReader = new StandardOutReader();
+
+            ((ChannelExec) channel).setCommand(command);
+
+
+            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
+            try {
+                channel.connect();
+            } catch (JSchException e) {
+
+                channel.disconnect();
+//            session.disconnect();
+
+                throw new AgentException("Unable to retrieve command output. Command - " + command +
+                        " on server - " + session.getHost() + ":" + session.getPort() +
+                        " connecting user name - "
+                        + session.getUserName(), e);
+            }
+            stdOutReader.onOutput(channel);
+            stdOutReader.getStdOutputString();
+            if (stdOutReader.getStdErrorString().contains("ls:")) {
+                throw new AgentException(stdOutReader.getStdErrorString());
+            }
+            channel.disconnect();
+            return Arrays.asList(stdOutReader.getStdOutputString().split("\n"));
+
+        } catch (JSchException e) {
+            throw new AgentException(e);
+        }
+    }
+
+    private static class DefaultUserInfo implements UserInfo, UIKeyboardInteractive {
+
+        private String userName;
+        private String password;
+        private String passphrase;
+
+        public DefaultUserInfo(String userName, String password, String passphrase) {
+            this.userName = userName;
+            this.password = password;
+            this.passphrase = passphrase;
+        }
+
+        @Override
+        public String getPassphrase() {
+            return passphrase;
+        }
+
+        @Override
+        public String getPassword() {
+            return password;
+        }
+
+        @Override
+        public boolean promptPassword(String s) {
+            return true;
+        }
+
+        @Override
+        public boolean promptPassphrase(String s) {
+            return false;
+        }
+
+        @Override
+        public boolean promptYesNo(String s) {
+            return false;
+        }
+
+        @Override
+        public void showMessage(String s) {
+
+        }
+
+        @Override
+        public String[] promptKeyboardInteractive(String destination, String name, String instruction, String[] prompt, boolean[] echo) {
+            return new String[0];
+        }
+    }
+
+    class SftpUserInfo implements UserInfo {
+
+        String password = null;
+
+        public SftpUserInfo(String password) {
+            this.password = password;
+        }
+
+        @Override
+        public String getPassphrase() {
+            return null;
+        }
+
+        @Override
+        public String getPassword() {
+            return password;
+        }
+
+        public void setPassword(String passwd) {
+            password = passwd;
+        }
+
+        @Override
+        public boolean promptPassphrase(String message) {
+            return false;
+        }
+
+        @Override
+        public boolean promptPassword(String message) {
+            return false;
+        }
+
+        @Override
+        public boolean promptYesNo(String message) {
+            return true;
+        }
+
+        @Override
+        public void showMessage(String message) {
+        }
+    }
+
+    static int checkAck(InputStream in) throws IOException {
+        int b = in.read();
+        if (b == 0) return b;
+        if (b == -1) return b;
+
+        if (b == 1 || b == 2) {
+            StringBuffer sb = new StringBuffer();
+            int c;
+            do {
+                c = in.read();
+                sb.append((char) c);
+            }
+            while (c != '\n');
+            // b == 1 means error, b == 2 means fatal error; print the message in both cases.
+            System.out.print(sb.toString());
+        }
+        return b;
+    }
+}
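
For the credential-store path, a minimal sketch (assuming the compute resource, gateway, user and token identifiers below already exist in the app catalog and credential store) could look like this:

    import org.apache.airavata.agents.api.AgentException;
    import org.apache.airavata.agents.api.CommandOutput;
    import org.apache.airavata.helix.agent.ssh.SshAgentAdaptor;

    public class TokenBasedSshExample {
        public static void main(String[] args) throws AgentException {
            SshAgentAdaptor adaptor = new SshAgentAdaptor();
            // Placeholder identifiers; init(...) resolves them against the
            // registry and credential store before opening the SSH session.
            adaptor.init("compute-resource-id", "gateway-id", "user-id", "credential-token");

            adaptor.createDirectory("/tmp/airavata-example");
            CommandOutput out = adaptor.executeCommand("ls /tmp", null);
            System.out.println(out.getStdOut());
        }
    }
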
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/StandardOutReader.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/StandardOutReader.java
new file mode 100644
index 0000000..49c036e
--- /dev/null
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/StandardOutReader.java
@@ -0,0 +1,83 @@
+package org.apache.airavata.helix.agent.ssh;
+
+import com.jcraft.jsch.Channel;
+import org.apache.airavata.agents.api.CommandOutput;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class StandardOutReader implements CommandOutput {
+
+    // TODO improve this. We need direct access to std out and the exit code
+
+    String stdOutputString = null;
+    ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
+    private int exitCode;
+
+    public void onOutput(Channel channel) {
+        try {
+            StringBuffer pbsOutput = new StringBuffer("");
+            InputStream inputStream =  channel.getInputStream();
+            byte[] tmp = new byte[1024];
+            do {
+                while (inputStream.available() > 0) {
+                    int i = inputStream.read(tmp, 0, 1024);
+                    if (i < 0) break;
+                    pbsOutput.append(new String(tmp, 0, i));
+                }
+            } while (!channel.isClosed()) ;
+            String output = pbsOutput.toString();
+            this.setStdOutputString(output);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void exitCode(int code) {
+        System.out.println("Program exit code - " + code);
+        this.exitCode = code;
+    }
+
+    public int getExitCode() {
+        return exitCode;
+    }
+
+    public String getStdOutputString() {
+        return stdOutputString;
+    }
+
+    public void setStdOutputString(String stdOutputString) {
+        this.stdOutputString = stdOutputString;
+    }
+
+    public String getStdErrorString() {
+        return errorStream.toString();
+    }
+
+    public OutputStream getStandardError() {
+        return errorStream;
+    }
+
+    @Override
+    public String getStdOut() {
+        // Return the captured standard output instead of null so CommandOutput callers can use it.
+        return stdOutputString;
+    }
+
+    @Override
+    public String getStdError() {
+        // Return the captured standard error instead of null so CommandOutput callers can use it.
+        return errorStream.toString();
+    }
+
+    @Override
+    public String getExitCommand() {
+        return null;
+    }
+}
diff --git a/modules/airavata-helix/pom.xml b/modules/airavata-helix/pom.xml
new file mode 100644
index 0000000..05938fd
--- /dev/null
+++ b/modules/airavata-helix/pom.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <packaging>pom</packaging>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>airavata-helix</artifactId>
+
+    <modules>
+        <module>agent-api</module>
+        <module>agent-impl</module>
+        <module>task-api</module>
+        <module>task-core</module>
+        <module>workflow-impl</module>
+    </modules>
+
+</project>
\ No newline at end of file
diff --git a/modules/airavata-helix/task-api/pom.xml b/modules/airavata-helix/task-api/pom.xml
new file mode 100644
index 0000000..41ec00c
--- /dev/null
+++ b/modules/airavata-helix/task-api/pom.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata-helix</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>task-api</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.helix</groupId>
+            <artifactId>helix-core</artifactId>
+            <version>0.6.7</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>agent-api</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+    <!--<build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.5.1</version>
+                <configuration>
+                    <source>${java.version}</source>
+                    <target>${java.version}</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>-->
+</project>
\ No newline at end of file
diff --git a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/TaskHelper.java b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/TaskHelper.java
new file mode 100644
index 0000000..07de06e
--- /dev/null
+++ b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/TaskHelper.java
@@ -0,0 +1,13 @@
+package org.apache.airavata.helix.task.api;
+
+import org.apache.airavata.helix.task.api.support.AdaptorSupport;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public interface TaskHelper {
+    public AdaptorSupport getAdaptorSupport();
+}
diff --git a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskDef.java b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskDef.java
new file mode 100644
index 0000000..3e4b7f1
--- /dev/null
+++ b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskDef.java
@@ -0,0 +1,18 @@
+package org.apache.airavata.helix.task.api.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface TaskDef {
+    public String name();
+}
diff --git a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskOutPort.java b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskOutPort.java
new file mode 100644
index 0000000..a22c387
--- /dev/null
+++ b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskOutPort.java
@@ -0,0 +1,18 @@
+package org.apache.airavata.helix.task.api.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface TaskOutPort {
+    public String name();
+}
diff --git a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskParam.java b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskParam.java
new file mode 100644
index 0000000..198b172
--- /dev/null
+++ b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/annotation/TaskParam.java
@@ -0,0 +1,20 @@
+package org.apache.airavata.helix.task.api.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface TaskParam {
+    public String name();
+    public String defaultValue() default "";
+    public boolean mandatory() default false;
+}
diff --git a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
new file mode 100644
index 0000000..3e24aaa
--- /dev/null
+++ b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
@@ -0,0 +1,52 @@
+package org.apache.airavata.helix.task.api.support;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.CommandOutput;
+import org.apache.airavata.agents.api.JobSubmissionOutput;
+
+import java.io.File;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public interface AdaptorSupport {
+    public void initializeAdaptor();
+
+    public AgentAdaptor fetchAdaptor(String computeResource, String protocol, String authToken) throws Exception;
+
+
+    /**
+     *
+     * @param command
+     * @param workingDirectory
+     * @param computeResourceId
+     * @param protocol
+     * @param authToken
+     * @throws Exception
+     */
+    public CommandOutput executeCommand(String command, String workingDirectory, String computeResourceId, String protocol, String authToken) throws Exception;
+
+    /**
+     *
+     * @param path
+     * @param computeResourceId
+     * @param protocol
+     * @param authToken
+     * @throws Exception
+     */
+    public void createDirectory(String path, String computeResourceId, String protocol, String authToken) throws Exception;
+
+    /**
+     *
+     * @param sourceFile
+     * @param destinationFile
+     * @param computeResourceId
+     * @param protocol
+     * @param authToken
+     * @throws Exception
+     */
+    public void copyFile(String sourceFile, String destinationFile, String computeResourceId, String protocol, String authToken) throws Exception;
+}
diff --git a/modules/airavata-helix/task-api/src/main/resources/application.properties b/modules/airavata-helix/task-api/src/main/resources/application.properties
new file mode 100644
index 0000000..733515f
--- /dev/null
+++ b/modules/airavata-helix/task-api/src/main/resources/application.properties
@@ -0,0 +1,3 @@
+zookeeper.connection.url=localhost:2199
+helix.cluster.name=AiravataDemoCluster
+helix.controller.name=controller-1
\ No newline at end of file
diff --git a/modules/airavata-helix/task-api/src/main/resources/log4j.properties b/modules/airavata-helix/task-api/src/main/resources/log4j.properties
new file mode 100644
index 0000000..5e31e3c
--- /dev/null
+++ b/modules/airavata-helix/task-api/src/main/resources/log4j.properties
@@ -0,0 +1,9 @@
+# Set root logger level to DEBUG and its only appender to A1.
+log4j.rootLogger=INFO, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
\ No newline at end of file
diff --git a/modules/airavata-helix/task-core/pom.xml b/modules/airavata-helix/task-core/pom.xml
new file mode 100644
index 0000000..df72dac
--- /dev/null
+++ b/modules/airavata-helix/task-core/pom.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata-helix</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>task-core</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.helix</groupId>
+            <artifactId>helix-core</artifactId>
+            <version>0.6.7</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>task-api</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>agent-api</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+    <!--<build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.5.1</version>
+                <configuration>
+                    <source>${java.version}</source>
+                    <target>${java.version}</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>-->
+
+</project>
\ No newline at end of file
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/AbstractTask.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/AbstractTask.java
new file mode 100644
index 0000000..04fa37f
--- /dev/null
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/AbstractTask.java
@@ -0,0 +1,108 @@
+package org.apache.airavata.helix.core;
+
+import org.apache.airavata.helix.core.util.TaskUtil;
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskParam;
+import org.apache.helix.HelixManager;
+import org.apache.helix.task.Task;
+import org.apache.helix.task.TaskCallbackContext;
+import org.apache.helix.task.TaskResult;
+import org.apache.helix.task.UserContentStore;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public abstract class AbstractTask extends UserContentStore implements Task {
+
+    private static final String NEXT_JOB = "next-job";
+    private static final String WORKFLOW_STARTED = "workflow-started";
+
+    @TaskParam(name = "taskId")
+    private String taskId;
+
+    private TaskCallbackContext callbackContext;
+    private TaskHelper taskHelper;
+
+    @Override
+    public void init(HelixManager manager, String workflowName, String jobName, String taskName) {
+        super.init(manager, workflowName, jobName, taskName);
+        try {
+            TaskUtil.deserializeTaskData(this, this.callbackContext.getTaskConfig().getConfigMap());
+        } catch (IllegalAccessException | InstantiationException e) {
+            e.printStackTrace();
+        }
+    }
+
+    @Override
+    public final TaskResult run() {
+        boolean isThisNextJob = getUserContent(WORKFLOW_STARTED, Scope.WORKFLOW) == null ||
+                this.callbackContext.getJobConfig().getJobId()
+                        .equals(this.callbackContext.getJobConfig().getWorkflow() + "_" + getUserContent(NEXT_JOB, Scope.WORKFLOW));
+        if (isThisNextJob) {
+            return onRun(this.taskHelper);
+        } else {
+            return new TaskResult(TaskResult.Status.COMPLETED, "Not a target job");
+        }
+    }
+
+    @Override
+    public final void cancel() {
+        onCancel();
+    }
+
+    public abstract TaskResult onRun(TaskHelper helper);
+
+    public abstract void onCancel();
+
+    protected void publishErrors(Throwable e) {
+        // TODO Publish through kafka channel with task and workflow id
+        e.printStackTrace();
+    }
+
+    public void sendNextJob(String jobId) {
+        putUserContent(WORKFLOW_STARTED, "TRUE", Scope.WORKFLOW);
+        if (jobId != null) {
+            putUserContent(NEXT_JOB, jobId, Scope.WORKFLOW);
+        }
+    }
+
+    protected void setContextVariable(String key, String value) {
+        putUserContent(key, value, Scope.WORKFLOW);
+    }
+
+    protected String getContextVariable(String key) {
+        return getUserContent(key, Scope.WORKFLOW);
+    }
+
+    // Getters and setters
+
+    public String getTaskId() {
+        return taskId;
+    }
+
+    public AbstractTask setTaskId(String taskId) {
+        this.taskId = taskId;
+        return this;
+    }
+
+    public TaskCallbackContext getCallbackContext() {
+        return callbackContext;
+    }
+
+    public AbstractTask setCallbackContext(TaskCallbackContext callbackContext) {
+        this.callbackContext = callbackContext;
+        return this;
+    }
+
+    public TaskHelper getTaskHelper() {
+        return taskHelper;
+    }
+
+    public AbstractTask setTaskHelper(TaskHelper taskHelper) {
+        this.taskHelper = taskHelper;
+        return this;
+    }
+}
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/OutPort.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/OutPort.java
new file mode 100644
index 0000000..99dc37c
--- /dev/null
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/OutPort.java
@@ -0,0 +1,44 @@
+package org.apache.airavata.helix.core;
+
+import org.apache.helix.task.TaskResult;
+import org.apache.helix.task.UserContentStore;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class OutPort {
+
+    private String nextJobId;
+    private AbstractTask task;
+
+    public OutPort(String nextJobId, AbstractTask task) {
+        this.nextJobId = nextJobId;
+        this.task = task;
+    }
+
+    public TaskResult invoke(TaskResult taskResult) {
+        task.sendNextJob(nextJobId);
+        return taskResult;
+    }
+
+    public String getNextJobId() {
+        return nextJobId;
+    }
+
+    public OutPort setNextJobId(String nextJobId) {
+        this.nextJobId = nextJobId;
+        return this;
+    }
+
+    public AbstractTask getTask() {
+        return task;
+    }
+
+    public OutPort setTask(AbstractTask task) {
+        this.task = task;
+        return this;
+    }
+}
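
To make the intended wiring of AbstractTask, the task annotations and OutPort concrete, here is an illustrative task sketch (not part of this change); the task name, parameter names and the "SSH" protocol string are assumptions made for the example:

    import org.apache.airavata.agents.api.CommandOutput;
    import org.apache.airavata.helix.core.AbstractTask;
    import org.apache.airavata.helix.core.OutPort;
    import org.apache.airavata.helix.task.api.TaskHelper;
    import org.apache.airavata.helix.task.api.annotation.TaskDef;
    import org.apache.airavata.helix.task.api.annotation.TaskOutPort;
    import org.apache.airavata.helix.task.api.annotation.TaskParam;
    import org.apache.helix.task.TaskResult;

    @TaskDef(name = "EchoTask")
    public class EchoTask extends AbstractTask {

        // Parameter and port names below are illustrative, not taken from the commit.
        @TaskParam(name = "computeResourceId", mandatory = true)
        private String computeResourceId;

        @TaskParam(name = "authToken", mandatory = true)
        private String authToken;

        @TaskOutPort(name = "nextTask")
        private OutPort nextTask;

        @Override
        public TaskResult onRun(TaskHelper helper) {
            try {
                // Run a command on the compute resource through the pluggable adaptor layer.
                CommandOutput output = helper.getAdaptorSupport()
                        .executeCommand("echo " + getTaskId(), "/tmp", computeResourceId, "SSH", authToken);
                // Route the workflow to the job wired into this task's out port.
                return nextTask.invoke(new TaskResult(TaskResult.Status.COMPLETED, output.getStdOut()));
            } catch (Exception e) {
                publishErrors(e);
                return new TaskResult(TaskResult.Status.FAILED, e.getMessage());
            }
        }

        @Override
        public void onCancel() {
            // Nothing to clean up in this sketch.
        }
    }
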
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/controller/HelixController.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/controller/HelixController.java
new file mode 100644
index 0000000..cdc27f7
--- /dev/null
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/controller/HelixController.java
@@ -0,0 +1,91 @@
+package org.apache.airavata.helix.core.controller;
+
+import org.apache.airavata.helix.core.util.PropertyResolver;
+import org.apache.helix.controller.HelixControllerMain;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.io.IOException;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class HelixController implements Runnable {
+
+    private static final Logger logger = LogManager.getLogger(HelixController.class);
+
+    private String clusterName;
+    private String controllerName;
+    private String zkAddress;
+    private org.apache.helix.HelixManager zkHelixManager;
+
+    private CountDownLatch startLatch = new CountDownLatch(1);
+    private CountDownLatch stopLatch = new CountDownLatch(1);
+
+    public HelixController(String propertyFile) throws IOException {
+
+        PropertyResolver propertyResolver = new PropertyResolver();
+        propertyResolver.loadInputStream(this.getClass().getClassLoader().getResourceAsStream(propertyFile));
+
+        this.clusterName = propertyResolver.get("helix.cluster.name");
+        this.controllerName = propertyResolver.get("helix.controller.name");
+        this.zkAddress = propertyResolver.get("zookeeper.connection.url");
+    }
+
+    public void run() {
+        try {
+            zkHelixManager = HelixControllerMain.startHelixController(zkAddress, clusterName,
+                    controllerName, HelixControllerMain.STANDALONE);
+            startLatch.countDown();
+            stopLatch.await();
+        } catch (Exception ex) {
+            logger.error("Error in run() for Controller: " + controllerName + ", reason: " + ex, ex);
+        } finally {
+            disconnect();
+        }
+    }
+
+    public void start() {
+        new Thread(this).start();
+        try {
+            startLatch.await();
+            logger.info("Controller: " + controllerName + ", has connected to cluster: " + clusterName);
+
+            Runtime.getRuntime().addShutdownHook(
+                    new Thread() {
+                        @Override
+                        public void run() {
+                            disconnect();
+                        }
+                    }
+            );
+
+        } catch (InterruptedException ex) {
+            logger.error("Controller: " + controllerName + ", is interrupted! reason: " + ex, ex);
+        }
+    }
+
+    public void stop() {
+        stopLatch.countDown();
+    }
+
+    private void disconnect() {
+        if (zkHelixManager != null) {
+            logger.info("Controller: " + controllerName + ", has disconnected from cluster: " + clusterName);
+            zkHelixManager.disconnect();
+        }
+    }
+
+    public static void main(String args[]) {
+        try {
+            HelixController helixController = new HelixController("application.properties");
+            helixController.start();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
new file mode 100644
index 0000000..190b866
--- /dev/null
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
@@ -0,0 +1,171 @@
+package org.apache.airavata.helix.core.participant;
+
+import org.apache.airavata.helix.core.support.TaskHelperImpl;
+import org.apache.airavata.helix.core.AbstractTask;
+import org.apache.airavata.helix.core.util.PropertyResolver;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.helix.InstanceType;
+import org.apache.helix.examples.OnlineOfflineStateModelFactory;
+import org.apache.helix.manager.zk.ZKHelixAdmin;
+import org.apache.helix.manager.zk.ZKHelixManager;
+import org.apache.helix.manager.zk.ZNRecordSerializer;
+import org.apache.helix.manager.zk.ZkClient;
+import org.apache.helix.model.BuiltInStateModelDefinitions;
+import org.apache.helix.model.InstanceConfig;
+import org.apache.helix.participant.StateMachineEngine;
+import org.apache.helix.task.Task;
+import org.apache.helix.task.TaskCallbackContext;
+import org.apache.helix.task.TaskFactory;
+import org.apache.helix.task.TaskStateModelFactory;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class HelixParticipant <T extends AbstractTask> implements Runnable {
+
+    private static final Logger logger = LogManager.getLogger(HelixParticipant.class);
+
+    private String zkAddress;
+    private String clusterName;
+    private String participantName;
+    private ZKHelixManager zkHelixManager;
+    private String taskTypeName;
+    private PropertyResolver propertyResolver;
+    private Class<T> taskClass;
+
+    public HelixParticipant(String propertyFile, Class<T> taskClass, String taskTypeName) throws IOException {
+
+        logger.info("Initializing Participant Node");
+
+        this.propertyResolver = new PropertyResolver();
+        propertyResolver.loadInputStream(this.getClass().getClassLoader().getResourceAsStream(propertyFile));
+
+        this.zkAddress = propertyResolver.get("zookeeper.connection.url");
+        this.clusterName = propertyResolver.get("helix.cluster.name");
+        this.participantName = propertyResolver.get("participant.name");
+        this.taskTypeName = taskTypeName;
+        this.taskClass = taskClass;
+
+        logger.info("Zookeper connection url " + zkAddress);
+        logger.info("Cluster name " + clusterName);
+        logger.info("Participant name " + participantName);
+        logger.info("Task type " + taskTypeName);
+        if (taskClass != null) {
+            logger.info("Task class " + taskClass.getCanonicalName());
+        }
+    }
+
+    public Map<String, TaskFactory> getTaskFactory() {
+        Map<String, TaskFactory> taskRegistry = new HashMap<String, TaskFactory>();
+
+        TaskFactory taskFac = new TaskFactory() {
+            public Task createNewTask(TaskCallbackContext context) {
+                try {
+                    return taskClass.newInstance()
+                            .setCallbackContext(context)
+                            .setTaskHelper(new TaskHelperImpl());
+                } catch (InstantiationException | IllegalAccessException e) {
+                    e.printStackTrace();
+                    return null;
+                }
+            }
+        };
+
+        TaskDef taskDef = taskClass.getAnnotation(TaskDef.class);
+        taskRegistry.put(taskDef.name(), taskFac);
+
+        return taskRegistry;
+    }
+
+    public void run() {
+        ZkClient zkClient = null;
+        try {
+            zkClient = new ZkClient(zkAddress, ZkClient.DEFAULT_SESSION_TIMEOUT,
+                    ZkClient.DEFAULT_CONNECTION_TIMEOUT, new ZNRecordSerializer());
+            ZKHelixAdmin zkHelixAdmin = new ZKHelixAdmin(zkClient);
+
+            List<String> nodesInCluster = zkHelixAdmin.getInstancesInCluster(clusterName);
+
+            if (!nodesInCluster.contains(participantName)) {
+                InstanceConfig instanceConfig = new InstanceConfig(participantName);
+                instanceConfig.setHostName("localhost");
+                instanceConfig.setInstanceEnabled(true);
+                if (taskTypeName != null) {
+                    instanceConfig.addTag(taskTypeName);
+                }
+                zkHelixAdmin.addInstance(clusterName, instanceConfig);
+                logger.debug("Instance: " + participantName + ", has been added to cluster: " + clusterName);
+            } else {
+                if (taskTypeName != null) {
+                    zkHelixAdmin.addInstanceTag(clusterName, participantName, taskTypeName);
+                }
+            }
+
+            Runtime.getRuntime().addShutdownHook(
+                    new Thread() {
+                        @Override
+                        public void run() {
+                            logger.debug("Participant: " + participantName + ", shutdown hook called.");
+                            disconnect();
+                        }
+                    }
+            );
+
+            // connect the participant manager
+            connect();
+        } catch (Exception ex) {
+            logger.error("Error in run() for Participant: " + participantName + ", reason: " + ex, ex);
+        } finally {
+            if (zkClient != null) {
+                zkClient.close();
+            }
+        }
+    }
+
+    private void connect() {
+        try {
+            zkHelixManager = new ZKHelixManager(clusterName, participantName, InstanceType.PARTICIPANT, zkAddress);
+            // register online-offline model
+            StateMachineEngine machineEngine = zkHelixManager.getStateMachineEngine();
+            OnlineOfflineStateModelFactory factory = new OnlineOfflineStateModelFactory(participantName);
+            machineEngine.registerStateModelFactory(BuiltInStateModelDefinitions.OnlineOffline.name(), factory);
+
+            // register task model
+            machineEngine.registerStateModelFactory("Task", new TaskStateModelFactory(zkHelixManager, getTaskFactory()));
+            logger.debug("Participant: " + participantName + ", registered state model factories.");
+
+            zkHelixManager.connect();
+            logger.info("Participant: " + participantName + ", has connected to cluster: " + clusterName);
+
+            Thread.currentThread().join();
+        } catch (InterruptedException ex) {
+            logger.error("Participant: " + participantName + ", is interrupted! reason: " + ex, ex);
+        }
+        catch (Exception ex) {
+            logger.error("Error in connect() for Participant: " + participantName + ", reason: " + ex, ex);
+        } finally {
+            disconnect();
+        }
+    }
+
+    private void disconnect() {
+        if (zkHelixManager != null) {
+            logger.info("Participant: " + participantName + ", has disconnected from cluster: " + clusterName);
+            zkHelixManager.disconnect();
+        }
+    }
+
+    public PropertyResolver getPropertyResolver() {
+        return propertyResolver;
+    }
+}
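
A participant is started by handing it a property file, the task class it should serve and a task-type tag, then running it on a thread. A minimal launcher sketch, assuming the hypothetical EchoTask from the earlier sketch is on the classpath in the same package:

    import java.io.IOException;
    import org.apache.airavata.helix.core.participant.HelixParticipant;

    public class ParticipantLauncher {
        public static void main(String[] args) throws IOException {
            // application.properties must supply zookeeper.connection.url,
            // helix.cluster.name and participant.name.
            HelixParticipant<EchoTask> participant =
                    new HelixParticipant<>("application.properties", EchoTask.class, "EchoTask");
            new Thread(participant).start();
        }
    }
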
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
new file mode 100644
index 0000000..87a1e17
--- /dev/null
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
@@ -0,0 +1,47 @@
+package org.apache.airavata.helix.core.support;
+
+import org.apache.airavata.agents.api.*;
+import org.apache.airavata.helix.task.api.support.AdaptorSupport;
+
+import java.io.File;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class AdaptorSupportImpl implements AdaptorSupport {
+
+    private static AdaptorSupportImpl INSTANCE;
+
+    private final AgentStore agentStore = new AgentStore();
+
+    private AdaptorSupportImpl() {}
+
+    public synchronized static AdaptorSupportImpl getInstance() {
+        if (INSTANCE == null) {
+            INSTANCE = new AdaptorSupportImpl();
+        }
+        return INSTANCE;
+    }
+
+    public void initializeAdaptor() {
+    }
+
+    public CommandOutput executeCommand(String command, String workingDirectory, String computeResourceId, String protocol, String authToken) throws AgentException {
+        return fetchAdaptor(computeResourceId, protocol, authToken).executeCommand(command, workingDirectory);
+    }
+
+    public void createDirectory(String path, String computeResourceId, String protocol, String authToken) throws AgentException {
+        fetchAdaptor(computeResourceId, protocol, authToken).createDirectory(path);
+    }
+
+    public void copyFile(String sourceFile, String destinationFile, String computeResourceId, String protocol, String authToken) throws AgentException {
+        fetchAdaptor(computeResourceId, protocol, authToken).copyFile(sourceFile, destinationFile);
+    }
+
+    public AgentAdaptor fetchAdaptor(String computeResource, String protocol, String authToken) throws AgentException {
+         return agentStore.fetchAdaptor(computeResource, protocol, authToken);
+    }
+}
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/TaskHelperImpl.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/TaskHelperImpl.java
new file mode 100644
index 0000000..77fc5ce
--- /dev/null
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/TaskHelperImpl.java
@@ -0,0 +1,16 @@
+package org.apache.airavata.helix.core.support;
+
+import org.apache.airavata.helix.task.api.TaskHelper;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class TaskHelperImpl implements TaskHelper {
+
+    public AdaptorSupportImpl getAdaptorSupport() {
+        return AdaptorSupportImpl.getInstance();
+    }
+}
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/PropertyResolver.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/PropertyResolver.java
new file mode 100644
index 0000000..4532345
--- /dev/null
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/PropertyResolver.java
@@ -0,0 +1,44 @@
+package org.apache.airavata.helix.core.util;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Optional;
+import java.util.Properties;
+
+/**
+ * TODO: Class level comments please
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class PropertyResolver {
+    private Properties properties = new Properties();
+
+    public void loadFromFile(File propertyFile) throws IOException {
+        properties = new Properties();
+        try (InputStream in = new FileInputStream(propertyFile)) {
+            properties.load(in);
+        }
+    }
+
+    public void loadInputStream(InputStream inputStream) throws IOException {
+        properties = new Properties();
+        properties.load(inputStream);
+    }
+
+    public String get(String key) {
+        // An environment variable named after the key (dots replaced by underscores) overrides
+        // the file value, but only for keys that are present in the loaded properties.
+        if (properties.containsKey(key)) {
+            String envValue = System.getenv(key.replace(".", "_"));
+            return envValue != null ? envValue : properties.getProperty(key);
+        } else {
+            return null;
+        }
+    }
+
+    public String get(String key, String defaultValue) {
+        return Optional.ofNullable(get(key)).orElse(defaultValue);
+    }
+}
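
A short usage sketch of the resolver above; the property key and file name are examples, not
values introduced by this patch:

    import org.apache.airavata.helix.core.util.PropertyResolver;

    public class PropertyResolverExample {
        public static void main(String[] args) throws Exception {
            PropertyResolver resolver = new PropertyResolver();
            // Assumes an application.properties on the classpath containing e.g.
            //   zookeeper.connection.url=localhost:2181
            resolver.loadInputStream(PropertyResolverExample.class.getClassLoader()
                    .getResourceAsStream("application.properties"));

            // If the environment defines zookeeper_connection_url, that value wins;
            // otherwise the file value (or the supplied default) is returned.
            String zkUrl = resolver.get("zookeeper.connection.url", "localhost:2181");
            System.out.println("Connecting to ZooKeeper at " + zkUrl);
        }
    }
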
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/TaskUtil.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/TaskUtil.java
new file mode 100644
index 0000000..d0f1ab6
--- /dev/null
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/TaskUtil.java
@@ -0,0 +1,103 @@
+package org.apache.airavata.helix.core.util;
+
+import org.apache.airavata.helix.core.AbstractTask;
+import org.apache.airavata.helix.core.OutPort;
+import org.apache.airavata.helix.task.api.annotation.TaskOutPort;
+import org.apache.airavata.helix.task.api.annotation.TaskParam;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Reflection helpers that serialize the {@link TaskParam} and {@link TaskOutPort} annotated fields of a
+ * task into a string map (for Helix task configs) and restore them on the participant side.
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class TaskUtil {
+
+    public static <T extends AbstractTask> List<OutPort> getOutPortsOfTask(T task) throws IllegalAccessException {
+        Field[] fields = task.getClass().getDeclaredFields();
+        List<OutPort> outPorts = new ArrayList<>();
+        for (Field field : fields) {
+            TaskOutPort outPortAnnotation = field.getAnnotation(TaskOutPort.class);
+            if (outPortAnnotation != null) {
+                field.setAccessible(true);
+                OutPort outPort = (OutPort) field.get(task);
+                outPorts.add(outPort);
+            }
+        }
+        return outPorts;
+    }
+
+    public static <T extends AbstractTask> Map<String, String> serializeTaskData(T data) throws IllegalAccessException {
+
+        Map<String, String> result = new HashMap<>();
+        for (Class<?> c = data.getClass(); c != null; c = c.getSuperclass()) {
+            Field[] fields = c.getDeclaredFields();
+            for (Field classField : fields) {
+                TaskParam param = classField.getAnnotation(TaskParam.class);
+                if (param != null) {
+                    classField.setAccessible(true);
+                    Object value = classField.get(data);
+                    if (value != null) {
+                        result.put(param.name(), value.toString());
+                    }
+                }
+
+                TaskOutPort outPort = classField.getAnnotation(TaskOutPort.class);
+                if (outPort != null) {
+                    classField.setAccessible(true);
+                    OutPort port = (OutPort) classField.get(data);
+                    if (port != null && port.getNextJobId() != null) {
+                        result.put(outPort.name(), port.getNextJobId());
+                    }
+                }
+            }
+        }
+        return result;
+    }
+
+    public static <T extends AbstractTask> void deserializeTaskData(T instance, Map<String, String> params) throws IllegalAccessException, InstantiationException {
+
+        List<Field> allFields = new ArrayList<>();
+        Class<?> genericClass = instance.getClass();
+
+        while (AbstractTask.class.isAssignableFrom(genericClass)) {
+            Field[] declaredFields = genericClass.getDeclaredFields();
+            for (Field declaredField : declaredFields) {
+                allFields.add(declaredField);
+            }
+            genericClass = genericClass.getSuperclass();
+        }
+
+        for (Field classField : allFields) {
+            TaskParam param = classField.getAnnotation(TaskParam.class);
+            if (param != null) {
+                if (params.containsKey(param.name())) {
+                    classField.setAccessible(true);
+                    if (classField.getType().isAssignableFrom(String.class)) {
+                        classField.set(instance, params.get(param.name()));
+                    } else if (classField.getType().isAssignableFrom(Integer.class)) {
+                        classField.set(instance, Integer.parseInt(params.get(param.name())));
+                    } else if (classField.getType().isAssignableFrom(Long.class)) {
+                        classField.set(instance, Long.parseLong(params.get(param.name())));
+                    } else if (classField.getType().isAssignableFrom(Boolean.class)) {
+                        classField.set(instance, Boolean.parseBoolean(params.get(param.name())));
+                    }
+                }
+            }
+        }
+
+        for (Field classField : allFields) {
+            TaskOutPort outPort = classField.getAnnotation(TaskOutPort.class);
+            if (outPort != null) {
+                classField.setAccessible(true);
+                if (params.containsKey(outPort.name())) {
+                    classField.set(instance, new OutPort(params.get(outPort.name()), instance));
+                } else {
+                    classField.set(instance, new OutPort(null, instance));
+                }
+            }
+        }
+    }
+}
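
To make the round trip concrete, here is a sketch that serializes and restores the EnvSetupTask
added further down in this patch. The id strings are placeholders, setTaskId is inherited from
AbstractTask in task-core (outside this hunk), and on the participant side the map normally
arrives through the Helix TaskConfig rather than being passed around directly.

    import org.apache.airavata.helix.core.OutPort;
    import org.apache.airavata.helix.core.util.TaskUtil;
    import org.apache.airavata.helix.impl.task.EnvSetupTask;

    import java.util.Map;

    public class TaskUtilExample {
        public static void main(String[] args) throws Exception {
            EnvSetupTask task = new EnvSetupTask();
            task.setTaskId("task-1");
            task.setProcessId("process-1");
            task.setExperimentId("experiment-1");
            task.setGatewayId("default");
            task.setWorkingDirectory("/tmp/scratch");
            task.setSuccessPort(new OutPort("task-2", task));

            // Every @TaskParam / @TaskOutPort field, including those inherited from
            // AiravataTask, becomes an entry in the string map.
            Map<String, String> serialized = TaskUtil.serializeTaskData(task);

            // On the participant side the fields are restored from that map.
            EnvSetupTask restored = new EnvSetupTask();
            TaskUtil.deserializeTaskData(restored, serialized);
            System.out.println("Restored working directory: " + restored.getWorkingDirectory());
        }
    }
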
diff --git a/modules/airavata-helix/workflow-impl/pom.xml b/modules/airavata-helix/workflow-impl/pom.xml
new file mode 100644
index 0000000..03f324e
--- /dev/null
+++ b/modules/airavata-helix/workflow-impl/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata-helix</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>workflow-impl</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>task-core</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>task-api</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+<!--
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.5.1</version>
+                <configuration>
+                    <source>${java.version}</source>
+                    <target>${java.version}</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+-->
+
+</project>
\ No newline at end of file
diff --git a/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/SimpleWorkflow.java b/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/SimpleWorkflow.java
new file mode 100644
index 0000000..d212f91
--- /dev/null
+++ b/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/SimpleWorkflow.java
@@ -0,0 +1,40 @@
+package org.apache.airavata.helix.workflow;
+
+/**
+ * Manual test driver that wires a few sample tasks into a workflow and submits it through the
+ * {@link WorkflowManager}; the task wiring below is kept commented out as a usage reference.
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class SimpleWorkflow {
+
+    public static void main(String[] args) throws Exception {
+        WorkflowManager wm = new WorkflowManager("AiravataDemoCluster", "WorkflowManager", "localhost:2199");
+
+        /*MkdirTask mkdirTask1 = new MkdirTask();
+        mkdirTask1.setDirName("/tmp/newdir");
+        mkdirTask1.setComputeResourceId("localhost");
+        mkdirTask1.setTaskId("task1");
+
+        MkdirTask mkdirTask2 = new MkdirTask();
+        mkdirTask2.setDirName("/tmp/newdir2");
+        mkdirTask2.setComputeResourceId("localhost");
+        mkdirTask2.setTaskId("task2");
+
+        CommandTask commandTask1 = new CommandTask();
+        commandTask1.setCommand("touch /tmp/newdir/a.txt");
+        commandTask1.setWorkingDirectory("/tmp");
+        commandTask1.setComputeResource("localhost");
+        commandTask1.setTaskId("task3");
+
+        mkdirTask1.setSuccessPort(new OutPort("task2", mkdirTask1));
+        mkdirTask2.setSuccessPort(new OutPort("task3", mkdirTask2));
+
+        List<AbstractTask> allTasks = new ArrayList<>();
+        allTasks.add(mkdirTask2);
+        allTasks.add(mkdirTask1);
+        allTasks.add(commandTask1);
+
+        wm.launchWorkflow(UUID.randomUUID().toString(), allTasks);*/
+    }
+}
diff --git a/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java b/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
new file mode 100644
index 0000000..ab7e3c4
--- /dev/null
+++ b/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
@@ -0,0 +1,94 @@
+package org.apache.airavata.helix.workflow;
+
+import org.apache.airavata.helix.core.AbstractTask;
+import org.apache.airavata.helix.core.OutPort;
+import org.apache.airavata.helix.core.util.TaskUtil;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.helix.HelixManager;
+import org.apache.helix.HelixManagerFactory;
+import org.apache.helix.InstanceType;
+import org.apache.helix.task.*;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Spectator-side helper that converts a list of {@link AbstractTask}s into a Helix task framework
+ * workflow (one job per task, wired through the task out-ports) and submits it via the {@link TaskDriver}.
+ *
+ * @author dimuthu
+ * @since 1.0.0-SNAPSHOT
+ */
+public class WorkflowManager {
+
+    private static final String WORKFLOW_PREFIX = "Workflow_of_process_";
+    private TaskDriver taskDriver;
+
+    public WorkflowManager(String helixClusterName, String instanceName, String zkConnectionString) throws Exception {
+
+        HelixManager helixManager = HelixManagerFactory.getZKHelixManager(helixClusterName, instanceName,
+                InstanceType.SPECTATOR, zkConnectionString);
+        helixManager.connect();
+
+        Runtime.getRuntime().addShutdownHook(
+                new Thread() {
+                    @Override
+                    public void run() {
+                        helixManager.disconnect();
+                    }
+                }
+        );
+
+        taskDriver = new TaskDriver(helixManager);
+    }
+
+    public void launchWorkflow(String processId, List<AbstractTask> tasks, boolean globalParticipant) throws Exception {
+
+        Workflow.Builder workflowBuilder = new Workflow.Builder(WORKFLOW_PREFIX + processId).setExpiry(0);
+
+        for (int i = 0; i < tasks.size(); i++) {
+            AbstractTask data = tasks.get(i);
+            String taskType = data.getClass().getAnnotation(TaskDef.class).name();
+            TaskConfig.Builder taskBuilder = new TaskConfig.Builder().setTaskId("Task_" + data.getTaskId())
+                    .setCommand(taskType);
+            Map<String, String> paramMap = TaskUtil.serializeTaskData(data);
+            paramMap.forEach(taskBuilder::addConfig);
+
+            List<TaskConfig> taskBuilds = new ArrayList<>();
+            taskBuilds.add(taskBuilder.build());
+
+            JobConfig.Builder job = new JobConfig.Builder()
+                    .addTaskConfigs(taskBuilds)
+                    .setFailureThreshold(0)
+                    .setMaxAttemptsPerTask(3);
+
+            if (!globalParticipant) {
+                job.setInstanceGroupTag(taskType);
+            }
+
+            workflowBuilder.addJob(data.getTaskId(), job);
+
+            List<OutPort> outPorts = TaskUtil.getOutPortsOfTask(data);
+            outPorts.forEach(outPort -> {
+                if (outPort != null) {
+                    workflowBuilder.addParentChildDependency(data.getTaskId(), outPort.getNextJobId());
+                }
+            });
+        }
+
+        WorkflowConfig.Builder config = new WorkflowConfig.Builder().setFailureThreshold(0);
+        workflowBuilder.setWorkflowConfig(config.build());
+        Workflow workflow = workflowBuilder.build();
+
+        taskDriver.start(workflow);
+
+        // TODO: Do we need to monitor workflow status? If so, how do we do it in a scalable manner? For example,
+        // if the hfac that monitors a particular workflow is killed for some reason, who takes over that responsibility?
+
+        TaskState taskState = taskDriver.pollForWorkflowState(workflow.getName(),
+                TaskState.COMPLETED, TaskState.FAILED, TaskState.STOPPED, TaskState.ABORTED);
+        System.out.println("Workflow finished with state " + taskState.name());
+
+    }
+}
\ No newline at end of file
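
For completeness, a sketch of driving the manager above end to end. It assumes a running Helix
cluster named AiravataDemoCluster with ZooKeeper on localhost:2199 (the same values SimpleWorkflow
uses) and a participant that has registered the "Environment Setup Task" type; the EnvSetupTask
setters used here are defined elsewhere in this patch, and setTaskId comes from AbstractTask in
task-core.

    import org.apache.airavata.helix.core.AbstractTask;
    import org.apache.airavata.helix.impl.task.EnvSetupTask;
    import org.apache.airavata.helix.workflow.WorkflowManager;

    import java.util.ArrayList;
    import java.util.List;
    import java.util.UUID;

    public class WorkflowManagerExample {
        public static void main(String[] args) throws Exception {
            WorkflowManager manager =
                    new WorkflowManager("AiravataDemoCluster", "WorkflowManagerExample", "localhost:2199");

            EnvSetupTask envSetup = new EnvSetupTask();
            envSetup.setTaskId("env-setup-1");
            envSetup.setWorkingDirectory("/tmp/scratch");

            List<AbstractTask> tasks = new ArrayList<>();
            tasks.add(envSetup);

            // 'true' sends every job to the GlobalParticipant; 'false' routes each job to
            // participants tagged with the task type. launchWorkflow blocks until the
            // workflow reaches a terminal state.
            manager.launchWorkflow(UUID.randomUUID().toString(), tasks, true);
        }
    }
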
diff --git a/modules/helix-spectator/pom.xml b/modules/helix-spectator/pom.xml
new file mode 100644
index 0000000..bae2785
--- /dev/null
+++ b/modules/helix-spectator/pom.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>helix-spectator</artifactId>
+
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>task-core</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-registry-cpi</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-registry-core</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-messaging-core</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>workflow-impl</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <!-- https://mvnrepository.com/artifact/org.mariadb.jdbc/mariadb-java-client -->
+        <dependency>
+            <groupId>org.mariadb.jdbc</groupId>
+            <artifactId>mariadb-java-client</artifactId>
+            <version>1.1.7</version>
+        </dependency>
+
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
new file mode 100644
index 0000000..f0e166b
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
@@ -0,0 +1,68 @@
+package org.apache.airavata.helix.impl.participant;
+
+import org.apache.airavata.helix.core.AbstractTask;
+import org.apache.airavata.helix.core.participant.HelixParticipant;
+import org.apache.airavata.helix.core.support.TaskHelperImpl;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.helix.task.Task;
+import org.apache.helix.task.TaskCallbackContext;
+import org.apache.helix.task.TaskFactory;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+public class GlobalParticipant extends HelixParticipant {
+
+    private String[] taskClasses = {
+        "org.apache.airavata.helix.impl.task.EnvSetupTask",
+        "org.apache.airavata.helix.impl.task.DataStagingTask",
+        "org.apache.airavata.helix.impl.task.submission.task.ForkJobSubmissionTask",
+        "org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask",
+        "org.apache.airavata.helix.impl.task.submission.task.LocalJobSubmissionTask"
+    };
+
+    public Map<String, TaskFactory> getTaskFactory() {
+        Map<String, TaskFactory> taskRegistry = new HashMap<String, TaskFactory>();
+
+        for (String taskClass : taskClasses) {
+            TaskFactory taskFac = new TaskFactory() {
+                public Task createNewTask(TaskCallbackContext context) {
+                    try {
+                        return AbstractTask.class.cast(Class.forName(taskClass).newInstance())
+                                .setCallbackContext(context)
+                                .setTaskHelper(new TaskHelperImpl());
+                    } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
+                        e.printStackTrace();
+                        return null;
+                    }
+                }
+            };
+
+            TaskDef taskDef = null;
+            try {
+                taskDef = Class.forName(taskClass).getAnnotation(TaskDef.class);
+                taskRegistry.put(taskDef.name(), taskFac);
+            } catch (ClassNotFoundException e) {
+                e.printStackTrace();
+            }
+        }
+
+
+        return taskRegistry;
+    }
+
+    public GlobalParticipant(String propertyFile, Class taskClass, String taskTypeName) throws IOException {
+        super(propertyFile, taskClass, taskTypeName);
+    }
+
+    public static void main(String args[]) throws IOException {
+        GlobalParticipant participant = new GlobalParticipant("application.properties", null, null);
+        Thread t = new Thread(participant);
+        t.start();
+    }
+
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
new file mode 100644
index 0000000..72d3e17
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
@@ -0,0 +1,293 @@
+package org.apache.airavata.helix.impl.task;
+
+import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.core.AbstractTask;
+import org.apache.airavata.helix.core.OutPort;
+import org.apache.airavata.helix.task.api.annotation.TaskOutPort;
+import org.apache.airavata.helix.task.api.annotation.TaskParam;
+import org.apache.airavata.messaging.core.MessageContext;
+import org.apache.airavata.messaging.core.Publisher;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
+import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
+import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
+import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
+import org.apache.airavata.model.messaging.event.MessageType;
+import org.apache.airavata.model.messaging.event.TaskIdentifier;
+import org.apache.airavata.model.messaging.event.TaskStatusChangeEvent;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.status.TaskState;
+import org.apache.airavata.model.status.TaskStatus;
+import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
+import org.apache.airavata.registry.cpi.*;
+import org.apache.helix.HelixManager;
+import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.util.*;
+
+public abstract class AiravataTask extends AbstractTask {
+
+    private static final Logger logger = LogManager.getLogger(AiravataTask.class);
+
+    private AppCatalog appCatalog;
+    private ExperimentCatalog experimentCatalog;
+    private Publisher statusPublisher;
+    private ProcessModel processModel;
+
+    private ComputeResourceDescription computeResourceDescription;
+    private ComputeResourcePreference gatewayComputeResourcePreference;
+    private UserComputeResourcePreference userComputeResourcePreference;
+    private UserResourceProfile userResourceProfile;
+    private GatewayResourceProfile gatewayResourceProfile;
+
+    @TaskParam(name = "Process Id")
+    private String processId;
+
+    @TaskParam(name = "experimentId")
+    private String experimentId;
+
+    @TaskParam(name = "gatewayId")
+    private String gatewayId;
+
+    @TaskOutPort(name = "Success Port")
+    private OutPort onSuccess;
+
+
+    protected TaskResult onSuccess(String message) {
+        String successMessage = "Task " + getTaskId() + " completed." + message != null ? " Message : " + message : "";
+        logger.info(successMessage);
+        return onSuccess.invoke(new TaskResult(TaskResult.Status.COMPLETED, message));
+    }
+
+    protected TaskResult onFail(String reason, boolean fatal, Throwable error) {
+        String errorMessage;
+
+        if (error == null) {
+            errorMessage = "Task " + getTaskId() + " failed due to " + reason;
+            logger.error(errorMessage);
+        } else {
+            errorMessage = "Task " + getTaskId() + " failed due to " + reason + ", " + error.getMessage();
+            logger.error(errorMessage, error);
+        }
+        return new TaskResult(fatal ? TaskResult.Status.FATAL_FAILED : TaskResult.Status.FAILED, errorMessage);
+
+    }
+
+    @Override
+    public void init(HelixManager manager, String workflowName, String jobName, String taskName) {
+        super.init(manager, workflowName, jobName, taskName);
+        try {
+            appCatalog = RegistryFactory.getAppCatalog();
+            experimentCatalog = RegistryFactory.getDefaultExpCatalog();
+            processModel = (ProcessModel) experimentCatalog.get(ExperimentCatalogModelType.PROCESS, processId);
+
+            this.computeResourceDescription = getAppCatalog().getComputeResource().getComputeResource(getProcessModel()
+                    .getComputeResourceId());
+            this.gatewayComputeResourcePreference = getAppCatalog().getGatewayProfile()
+                    .getComputeResourcePreference(getGatewayId(), computeResourceDescription.getComputeResourceId());
+
+            this.userComputeResourcePreference = getAppCatalog().getUserResourceProfile()
+                    .getUserComputeResourcePreference(getProcessModel().getUserName(), getGatewayId(), getProcessModel()
+                            .getComputeResourceId());
+
+            this.userResourceProfile = getAppCatalog().getUserResourceProfile()
+                    .getUserResourceProfile(getProcessModel().getUserName(), getGatewayId());
+
+            this.gatewayResourceProfile = getAppCatalog().getGatewayProfile().getGatewayProfile(getGatewayId());
+
+        } catch (AppCatalogException | RegistryException e) {
+            logger.error("Failed to initialize Airavata task " + getTaskId(), e);
+        }
+    }
+
+    protected AppCatalog getAppCatalog() {
+        return appCatalog;
+    }
+
+    protected void publishTaskState(TaskState ts) throws RegistryException {
+
+        TaskStatus taskStatus = new TaskStatus();
+        taskStatus.setState(ts);
+        taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+        experimentCatalog.add(ExpCatChildDataType.TASK_STATUS, taskStatus, getTaskId());
+        TaskIdentifier identifier = new TaskIdentifier(getTaskId(),
+                getProcessId(), getExperimentId(), getGatewayId());
+        TaskStatusChangeEvent taskStatusChangeEvent = new TaskStatusChangeEvent(ts,
+                identifier);
+        MessageContext msgCtx = new MessageContext(taskStatusChangeEvent, MessageType.TASK, AiravataUtils.getId
+                (MessageType.TASK.name()), getGatewayId());
+        msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
+    }
+
+
+    ///////////////////
+
+    public String getComputeResourceId() {
+        if (isUseUserCRPref() &&
+                userComputeResourcePreference != null &&
+                isValid(userComputeResourcePreference.getComputeResourceId())) {
+            return userComputeResourcePreference.getComputeResourceId();
+        } else {
+            return gatewayComputeResourcePreference.getComputeResourceId();
+        }
+    }
+
+    public String getComputeResourceCredentialToken(){
+        if (isUseUserCRPref()) {
+            if (userComputeResourcePreference != null &&
+                    isValid(userComputeResourcePreference.getResourceSpecificCredentialStoreToken())) {
+                return userComputeResourcePreference.getResourceSpecificCredentialStoreToken();
+            } else {
+                return userResourceProfile.getCredentialStoreToken();
+            }
+        } else {
+            if (isValid(gatewayComputeResourcePreference.getResourceSpecificCredentialStoreToken())) {
+                return gatewayComputeResourcePreference.getResourceSpecificCredentialStoreToken();
+            } else {
+                return gatewayResourceProfile.getCredentialStoreToken();
+            }
+        }
+    }
+
+    public String getComputeResourceLoginUserName(){
+        if (isUseUserCRPref() &&
+                userComputeResourcePreference != null &&
+                isValid(userComputeResourcePreference.getLoginUserName())) {
+            return userComputeResourcePreference.getLoginUserName();
+        } else if (isValid(getProcessModel().getProcessResourceSchedule().getOverrideLoginUserName())) {
+            return getProcessModel().getProcessResourceSchedule().getOverrideLoginUserName();
+        } else {
+            return gatewayComputeResourcePreference.getLoginUserName();
+        }
+    }
+
+    public JobSubmissionInterface getPreferredJobSubmissionInterface() throws AppCatalogException {
+        try {
+            JobSubmissionProtocol preferredJobSubmissionProtocol = getJobSubmissionProtocol();
+            ComputeResourceDescription resourceDescription = getComputeResourceDescription();
+            List<JobSubmissionInterface> jobSubmissionInterfaces = resourceDescription.getJobSubmissionInterfaces();
+            if (jobSubmissionInterfaces == null || jobSubmissionInterfaces.isEmpty()) {
+                throw new AppCatalogException("Compute resource should have at least one job submission interface defined...");
+            }
+            // Keep only the interfaces matching the preferred protocol (if one is configured)
+            // and pick the one with the lowest priority order.
+            List<JobSubmissionInterface> candidates = new ArrayList<>();
+            for (JobSubmissionInterface submissionInterface : jobSubmissionInterfaces) {
+                if (preferredJobSubmissionProtocol == null
+                        || preferredJobSubmissionProtocol == submissionInterface.getJobSubmissionProtocol()) {
+                    candidates.add(submissionInterface);
+                }
+            }
+            if (candidates.isEmpty()) {
+                throw new AppCatalogException("Compute resource does not provide a job submission interface for protocol "
+                        + preferredJobSubmissionProtocol);
+            }
+            candidates.sort(Comparator.comparingInt(JobSubmissionInterface::getPriorityOrder));
+            return candidates.get(0);
+        } catch (AppCatalogException e) {
+            throw new AppCatalogException("Error occurred while retrieving data from app catalog", e);
+        }
+    }
+
+    //////////////////////////
+
+
+    protected boolean isValid(String str) {
+        return str != null && !str.trim().isEmpty();
+    }
+
+    public boolean isUseUserCRPref() {
+        return getProcessModel().isUseUserCRPref();
+    }
+
+    public JobSubmissionProtocol getJobSubmissionProtocol() {
+        return getGatewayComputeResourcePreference().getPreferredJobSubmissionProtocol();
+    }
+
+    public ComputeResourcePreference getGatewayComputeResourcePreference() {
+        return gatewayComputeResourcePreference;
+    }
+
+
+    public ComputeResourceDescription getComputeResourceDescription() {
+        return computeResourceDescription;
+    }
+
+    ////////////////////////
+
+    
+    public void setAppCatalog(AppCatalog appCatalog) {
+        this.appCatalog = appCatalog;
+    }
+
+    public ExperimentCatalog getExperimentCatalog() {
+        return experimentCatalog;
+    }
+
+    public void setExperimentCatalog(ExperimentCatalog experimentCatalog) {
+        this.experimentCatalog = experimentCatalog;
+    }
+
+    public Publisher getStatusPublisher() {
+        return statusPublisher;
+    }
+
+    public void setStatusPublisher(Publisher statusPublisher) {
+        this.statusPublisher = statusPublisher;
+    }
+
+    public String getProcessId() {
+        return processId;
+    }
+
+    public void setProcessId(String processId) {
+        this.processId = processId;
+    }
+
+    public String getExperimentId() {
+        return experimentId;
+    }
+
+    public void setExperimentId(String experimentId) {
+        this.experimentId = experimentId;
+    }
+
+    public String getGatewayId() {
+        return gatewayId;
+    }
+
+    public void setGatewayId(String gatewayId) {
+        this.gatewayId = gatewayId;
+    }
+
+    public ProcessModel getProcessModel() {
+        return processModel;
+    }
+
+    public void setProcessModel(ProcessModel processModel) {
+        this.processModel = processModel;
+    }
+}
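
One way a concrete task can use the onSuccess / onFail helpers above, shown as a sketch rather
than code taken from this patch (EnvSetupTask below, for instance, invokes its own out-port
directly instead):

    import org.apache.airavata.helix.impl.task.AiravataTask;
    import org.apache.airavata.helix.task.api.TaskHelper;
    import org.apache.airavata.helix.task.api.annotation.TaskDef;
    import org.apache.helix.task.TaskResult;

    @TaskDef(name = "Example Task")
    public class ExampleTask extends AiravataTask {

        @Override
        public TaskResult onRun(TaskHelper taskHelper) {
            try {
                // ... task-specific work, typically through taskHelper.getAdaptorSupport() ...
                return onSuccess("Example task finished");
            } catch (Exception e) {
                // 'true' marks the failure as fatal so the task is not retried.
                return onFail("Example task failed", true, e);
            }
        }

        @Override
        public void onCancel() {
        }
    }
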
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java
new file mode 100644
index 0000000..346aa73
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/DataStagingTask.java
@@ -0,0 +1,19 @@
+package org.apache.airavata.helix.impl.task;
+
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.helix.task.TaskResult;
+
+@TaskDef(name = "Data Staging Task")
+public class DataStagingTask extends AiravataTask {
+
+    @Override
+    public TaskResult onRun(TaskHelper taskHelper) {
+        return null;
+    }
+
+    @Override
+    public void onCancel() {
+
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
new file mode 100644
index 0000000..1cab0e2
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
@@ -0,0 +1,64 @@
+package org.apache.airavata.helix.impl.task;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.helix.core.OutPort;
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.airavata.helix.task.api.annotation.TaskOutPort;
+import org.apache.airavata.helix.task.api.annotation.TaskParam;
+import org.apache.airavata.model.status.TaskState;
+import org.apache.airavata.registry.cpi.RegistryException;
+import org.apache.helix.task.TaskResult;
+
+@TaskDef(name = "Environment Setup Task")
+public class EnvSetupTask extends AiravataTask {
+
+    @TaskParam(name = "Working Directory")
+    private String workingDirectory;
+
+    @TaskOutPort(name = "Success Out Port")
+    private OutPort successPort;
+
+    @Override
+    public TaskResult onRun(TaskHelper taskHelper) {
+        try {
+            publishTaskState(TaskState.EXECUTING);
+            AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(getComputeResourceId(),
+                    getJobSubmissionProtocol().name(), getComputeResourceCredentialToken());
+
+            adaptor.createDirectory(workingDirectory);
+            publishTaskState(TaskState.COMPLETED);
+            return successPort.invoke(new TaskResult(TaskResult.Status.COMPLETED, "Successfully completed"));
+        } catch (Exception e) {
+            try {
+                publishTaskState(TaskState.FAILED);
+            } catch (RegistryException e1) {
+                publishErrors(e1);
+                // do not rethrow; the original failure is reported below
+            }
+            publishErrors(e);
+            return new TaskResult(TaskResult.Status.FAILED, "Failed the task");
+        }
+    }
+
+    @Override
+    public void onCancel() {
+
+    }
+
+    public String getWorkingDirectory() {
+        return workingDirectory;
+    }
+
+    public void setWorkingDirectory(String workingDirectory) {
+        this.workingDirectory = workingDirectory;
+    }
+
+    public OutPort getSuccessPort() {
+        return successPort;
+    }
+
+    public void setSuccessPort(OutPort successPort) {
+        this.successPort = successPort;
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
new file mode 100644
index 0000000..ec75fb7
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
@@ -0,0 +1,415 @@
+package org.apache.airavata.helix.impl.task.submission;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class GroovyMapData {
+
+    @ScriptTag(name = "inputDir")
+    private String inputDir;
+
+    @ScriptTag(name = "outputDir")
+    private String outputDir;
+
+    @ScriptTag(name = "executablePath")
+    private String executablePath;
+
+    @ScriptTag(name = "standardOutFile")
+    private String stdoutFile;
+
+    @ScriptTag(name = "standardErrorFile")
+    private String stderrFile;
+
+    @ScriptTag(name = "scratchLocation")
+    private String scratchLocation;
+
+    @ScriptTag(name = "gatewayId")
+    private String gatewayId;
+
+    @ScriptTag(name = "gatewayUserName")
+    private String gatewayUserName;
+
+    @ScriptTag(name = "applicationName")
+    private String applicationName;
+
+    @ScriptTag(name = "queueSpecificMacros")
+    private String queueSpecificMacros;
+
+    @ScriptTag(name = "accountString")
+    private String accountString;
+
+    @ScriptTag(name = "reservation")
+    private String reservation;
+
+    @ScriptTag(name = "jobName")
+    private String jobName;
+
+    @ScriptTag(name = "workingDirectory")
+    private String workingDirectory;
+
+    @ScriptTag(name = "inputs")
+    private List<String> inputs;
+
+    @ScriptTag(name = "inputsAll")
+    private List<String> inputsAll;
+
+    @ScriptTag(name = "userName")
+    private String userName;
+
+    @ScriptTag(name = "shellName")
+    private String shellName;
+
+    @ScriptTag(name = "maxWallTime")
+    private String maxWallTime;
+
+    @ScriptTag(name = "qualityOfService")
+    private String qualityOfService;
+
+    @ScriptTag(name = "queueName")
+    private String queueName;
+
+    @ScriptTag(name = "nodes")
+    private Integer nodes;
+
+    @ScriptTag(name = "processPerNode")
+    private Integer processPerNode;
+
+    @ScriptTag(name = "cpuCount")
+    private Integer cpuCount;
+
+    @ScriptTag(name = "usedMem")
+    private Integer usedMem;
+
+    @ScriptTag(name = "mailAddress")
+    private String mailAddress;
+
+    @ScriptTag(name = "exports")
+    private List<String> exports;
+
+    @ScriptTag(name = "moduleCommands")
+    private List<String> moduleCommands;
+
+    @ScriptTag(name = "preJobCommands")
+    private List<String> preJobCommands;
+
+    @ScriptTag(name = "postJobCommands")
+    private List<String> postJobCommands;
+
+    @ScriptTag(name = "jobSubmitterCommand")
+    private String jobSubmitterCommand;
+
+    @ScriptTag(name = "chassisName")
+    private String chassisName;
+
+
+    public Map<String, Object> getMap() {
+
+        Map<String, Object> map = new HashMap<>();
+        Field[] fields = this.getClass().getDeclaredFields();
+
+        for (Field field : fields) {
+            ScriptTag scriptTag = field.getAnnotation(ScriptTag.class);
+            if (scriptTag != null) {
+                field.setAccessible(true);
+                try {
+                    map.put(scriptTag.name(), field.get(this));
+                } catch (IllegalAccessException e) {
+                    e.printStackTrace();
+                    // ignore silently
+                }
+            }
+        }
+
+        return map;
+    }
+
+    public String getInputDir() {
+        return inputDir;
+    }
+
+    public GroovyMapData setInputDir(String inputDir) {
+        this.inputDir = inputDir;
+        return this;
+    }
+
+    public String getOutputDir() {
+        return outputDir;
+    }
+
+    public GroovyMapData setOutputDir(String outputDir) {
+        this.outputDir = outputDir;
+        return this;
+    }
+
+    public String getExecutablePath() {
+        return executablePath;
+    }
+
+    public GroovyMapData setExecutablePath(String executablePath) {
+        this.executablePath = executablePath;
+        return this;
+    }
+
+    public String getStdoutFile() {
+        return stdoutFile;
+    }
+
+    public GroovyMapData setStdoutFile(String stdoutFile) {
+        this.stdoutFile = stdoutFile;
+        return this;
+    }
+
+    public String getStderrFile() {
+        return stderrFile;
+    }
+
+    public GroovyMapData setStderrFile(String stderrFile) {
+        this.stderrFile = stderrFile;
+        return this;
+    }
+
+    public String getScratchLocation() {
+        return scratchLocation;
+    }
+
+    public GroovyMapData setScratchLocation(String scratchLocation) {
+        this.scratchLocation = scratchLocation;
+        return this;
+    }
+
+    public String getGatewayId() {
+        return gatewayId;
+    }
+
+    public GroovyMapData setGatewayId(String gatewayId) {
+        this.gatewayId = gatewayId;
+        return this;
+    }
+
+    public String getGatewayUserName() {
+        return gatewayUserName;
+    }
+
+    public GroovyMapData setGatewayUserName(String gatewayUserName) {
+        this.gatewayUserName = gatewayUserName;
+        return this;
+    }
+
+    public String getApplicationName() {
+        return applicationName;
+    }
+
+    public GroovyMapData setApplicationName(String applicationName) {
+        this.applicationName = applicationName;
+        return this;
+    }
+
+    public String getQueueSpecificMacros() {
+        return queueSpecificMacros;
+    }
+
+    public GroovyMapData setQueueSpecificMacros(String queueSpecificMacros) {
+        this.queueSpecificMacros = queueSpecificMacros;
+        return this;
+    }
+
+    public String getAccountString() {
+        return accountString;
+    }
+
+    public GroovyMapData setAccountString(String accountString) {
+        this.accountString = accountString;
+        return this;
+    }
+
+    public String getReservation() {
+        return reservation;
+    }
+
+    public GroovyMapData setReservation(String reservation) {
+        this.reservation = reservation;
+        return this;
+    }
+
+    public String getJobName() {
+        return jobName;
+    }
+
+    public GroovyMapData setJobName(String jobName) {
+        this.jobName = jobName;
+        return this;
+    }
+
+    public String getWorkingDirectory() {
+        return workingDirectory;
+    }
+
+    public GroovyMapData setWorkingDirectory(String workingDirectory) {
+        this.workingDirectory = workingDirectory;
+        return this;
+    }
+
+    public List<String> getInputs() {
+        return inputs;
+    }
+
+    public GroovyMapData setInputs(List<String> inputs) {
+        this.inputs = inputs;
+        return this;
+    }
+
+    public List<String> getInputsAll() {
+        return inputsAll;
+    }
+
+    public GroovyMapData setInputsAll(List<String> inputsAll) {
+        this.inputsAll = inputsAll;
+        return this;
+    }
+
+    public String getUserName() {
+        return userName;
+    }
+
+    public GroovyMapData setUserName(String userName) {
+        this.userName = userName;
+        return this;
+    }
+
+    public String getShellName() {
+        return shellName;
+    }
+
+    public GroovyMapData setShellName(String shellName) {
+        this.shellName = shellName;
+        return this;
+    }
+
+    public String getMaxWallTime() {
+        return maxWallTime;
+    }
+
+    public GroovyMapData setMaxWallTime(String maxWallTime) {
+        this.maxWallTime = maxWallTime;
+        return this;
+    }
+
+    public String getQualityOfService() {
+        return qualityOfService;
+    }
+
+    public GroovyMapData setQualityOfService(String qualityOfService) {
+        this.qualityOfService = qualityOfService;
+        return this;
+    }
+
+    public String getQueueName() {
+        return queueName;
+    }
+
+    public GroovyMapData setQueueName(String queueName) {
+        this.queueName = queueName;
+        return this;
+    }
+
+    public Integer getNodes() {
+        return nodes;
+    }
+
+    public GroovyMapData setNodes(Integer nodes) {
+        this.nodes = nodes;
+        return this;
+    }
+
+    public Integer getProcessPerNode() {
+        return processPerNode;
+    }
+
+    public GroovyMapData setProcessPerNode(Integer processPerNode) {
+        this.processPerNode = processPerNode;
+        return this;
+    }
+
+    public Integer getCpuCount() {
+        return cpuCount;
+    }
+
+    public GroovyMapData setCpuCount(Integer cpuCount) {
+        this.cpuCount = cpuCount;
+        return this;
+    }
+
+    public Integer getUsedMem() {
+        return usedMem;
+    }
+
+    public GroovyMapData setUsedMem(Integer usedMem) {
+        this.usedMem = usedMem;
+        return this;
+    }
+
+    public String getMailAddress() {
+        return mailAddress;
+    }
+
+    public GroovyMapData setMailAddress(String mailAddress) {
+        this.mailAddress = mailAddress;
+        return this;
+    }
+
+    public List<String> getExports() {
+        return exports;
+    }
+
+    public GroovyMapData setExports(List<String> exports) {
+        this.exports = exports;
+        return this;
+    }
+
+    public List<String> getModuleCommands() {
+        return moduleCommands;
+    }
+
+    public GroovyMapData setModuleCommands(List<String> moduleCommands) {
+        this.moduleCommands = moduleCommands;
+        return this;
+    }
+
+    public List<String> getPreJobCommands() {
+        return preJobCommands;
+    }
+
+    public GroovyMapData setPreJobCommands(List<String> preJobCommands) {
+        this.preJobCommands = preJobCommands;
+        return this;
+    }
+
+    public List<String> getPostJobCommands() {
+        return postJobCommands;
+    }
+
+    public GroovyMapData setPostJobCommands(List<String> postJobCommands) {
+        this.postJobCommands = postJobCommands;
+        return this;
+    }
+
+    public String getJobSubmitterCommand() {
+        return jobSubmitterCommand;
+    }
+
+    public GroovyMapData setJobSubmitterCommand(String jobSubmitterCommand) {
+        this.jobSubmitterCommand = jobSubmitterCommand;
+        return this;
+    }
+
+    public String getChassisName() {
+        return chassisName;
+    }
+
+    public GroovyMapData setChassisName(String chassisName) {
+        this.chassisName = chassisName;
+        return this;
+    }
+}
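
The map produced by getMap() is the binding that the *_Groovy.template job scripts are rendered
with (see JobFactory below). A small sketch, assuming Groovy's SimpleTemplateEngine is on the
classpath and using an inline placeholder template instead of one of the real template files:

    import groovy.text.SimpleTemplateEngine;

    import org.apache.airavata.helix.impl.task.submission.GroovyMapData;

    import java.util.Map;

    public class GroovyMapDataExample {
        public static void main(String[] args) throws Exception {
            Map<String, Object> binding = new GroovyMapData()
                    .setJobName("A1234567890")
                    .setQueueName("shared")
                    .setNodes(1)
                    .setProcessPerNode(16)
                    .setMaxWallTime("00:30:00")
                    .setWorkingDirectory("/scratch/user/job-1")
                    .setStdoutFile("/scratch/user/job-1/stdout")
                    .setStderrFile("/scratch/user/job-1/stderr")
                    .getMap();

            // Placeholder template; the real SLURM/PBS/... scripts ship as *_Groovy.template files.
            String template = "#SBATCH -J ${jobName}\n#SBATCH -N ${nodes}\n#SBATCH -t ${maxWallTime}\n";
            String script = new SimpleTemplateEngine().createTemplate(template).make(binding).toString();
            System.out.println(script);
        }
    }
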
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/Script.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/Script.java
new file mode 100644
index 0000000..208e9e5
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/Script.java
@@ -0,0 +1,43 @@
+package org.apache.airavata.helix.impl.task.submission;
+
+public enum Script {
+
+    SHELL_NAME("shellName"),
+    QUEUE_NAME("queueName"),
+    NODES("nodes"),
+    CPU_COUNT("cpuCount"),
+    MAIL_ADDRESS("mailAddress"),
+    ACCOUNT_STRING("accountString"),
+    MAX_WALL_TIME("maxWallTime"),
+    JOB_NAME("jobName"),
+    STANDARD_OUT_FILE("standardOutFile"),
+    STANDARD_ERROR_FILE("standardErrorFile"),
+    QUALITY_OF_SERVICE("qualityOfService"),
+    RESERVATION("reservation"),
+    EXPORTS("exports"),
+    MODULE_COMMANDS("moduleCommands"),
+    SCRATCH_LOCATION("scratchLocation"),
+    WORKING_DIR("workingDirectory"),
+    PRE_JOB_COMMANDS("preJobCommands"),
+    JOB_SUBMITTER_COMMAND("jobSubmitterCommand"),
+    EXECUTABLE_PATH("executablePath"),
+    INPUTS("inputs"),
+    INPUTS_ALL("inputsAll"),
+    POST_JOB_COMMANDS("postJobCommands"),
+    USED_MEM("usedMem"),
+    PROCESS_PER_NODE("processPerNode"),
+    CHASSIS_NAME("chassisName"),
+    INPUT_DIR("inputDir"),
+    OUTPUT_DIR("outputDir"),
+    USER_NAME("userName"),
+    GATEWAY_ID("gatewayId"),
+    GATEWAY_USER_NAME("gatewayUserName"),
+    APPLICATION_NAME("applicationName"),
+    QUEUE_SPECIFIC_MACROS("queueSpecificMacros")
+    ;
+
+    String name;
+    Script(String name) {
+        this.name = name;
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ScriptTag.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ScriptTag.java
new file mode 100644
index 0000000..c03c11f
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ScriptTag.java
@@ -0,0 +1,13 @@
+package org.apache.airavata.helix.impl.task.submission;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface ScriptTag {
+    public String name();
+    public boolean mandatory() default false;
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/SubmissionUtil.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/SubmissionUtil.java
new file mode 100644
index 0000000..e2cbfee
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/SubmissionUtil.java
@@ -0,0 +1,10 @@
+package org.apache.airavata.helix.impl.task.submission;
+
+import java.io.File;
+
+public class SubmissionUtil {
+
+    public static File createJobFile(GroovyMapData mapData) {
+        return null;
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
new file mode 100644
index 0000000..b04ffd8
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
@@ -0,0 +1,102 @@
+package org.apache.airavata.helix.impl.task.submission.config;
+
+import org.apache.airavata.helix.impl.task.submission.config.imp.*;
+import org.apache.airavata.helix.impl.task.submission.config.imp.parser.*;
+import org.apache.airavata.model.appcatalog.computeresource.*;
+import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
+import org.apache.airavata.registry.cpi.AppCatalog;
+import org.apache.airavata.registry.cpi.AppCatalogException;
+import org.apache.airavata.registry.cpi.RegistryException;
+
+public class JobFactory {
+
+    public static String getTemplateFileName(ResourceJobManagerType resourceJobManagerType) {
+        switch (resourceJobManagerType) {
+            case FORK:
+                return "FORK_Groovy.template";
+            case PBS:
+                return "PBS_Groovy.template";
+            case SLURM:
+                return "SLURM_Groovy.template";
+            case UGE:
+                return "UGE_Groovy.template";
+            case LSF:
+                return "LSF_Groovy.template";
+            case CLOUD:
+                return "CLOUD_Groovy.template";
+            default:
+                return null;
+        }
+    }
+
+    public static ResourceJobManager getResourceJobManager(AppCatalog appCatalog, JobSubmissionProtocol submissionProtocol, JobSubmissionInterface jobSubmissionInterface) {
+        try {
+            if (submissionProtocol == JobSubmissionProtocol.SSH ) {
+                SSHJobSubmission sshJobSubmission = getSSHJobSubmission(appCatalog, jobSubmissionInterface.getJobSubmissionInterfaceId());
+                if (sshJobSubmission != null) {
+                    return sshJobSubmission.getResourceJobManager();
+                }
+            } else if (submissionProtocol == JobSubmissionProtocol.LOCAL) {
+                LOCALSubmission localJobSubmission = getLocalJobSubmission(appCatalog, jobSubmissionInterface.getJobSubmissionInterfaceId());
+                if (localJobSubmission != null) {
+                    return localJobSubmission.getResourceJobManager();
+                }
+            } else if (submissionProtocol == JobSubmissionProtocol.SSH_FORK){
+                SSHJobSubmission sshJobSubmission = getSSHJobSubmission(appCatalog, jobSubmissionInterface.getJobSubmissionInterfaceId());
+                if (sshJobSubmission != null) {
+                    return sshJobSubmission.getResourceJobManager();
+                }
+            }
+        } catch (AppCatalogException e) {
+            e.printStackTrace();
+        }
+        return null;
+    }
+
+    public static LOCALSubmission getLocalJobSubmission(AppCatalog appCatalog, String submissionId) throws AppCatalogException {
+        try {
+            return appCatalog.getComputeResource().getLocalJobSubmission(submissionId);
+        } catch (Exception e) {
+            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
+
+    public static SSHJobSubmission getSSHJobSubmission(AppCatalog appCatalog, String submissionId) throws AppCatalogException {
+        try {
+            return appCatalog.getComputeResource().getSSHJobSubmission(submissionId);
+        } catch (Exception e) {
+            String errorMsg = "Error while retrieving SSH job submission with submission id : " + submissionId;
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
+
+    public static JobManagerConfiguration getJobManagerConfiguration(ResourceJobManager resourceJobManager) throws Exception {
+        if(resourceJobManager == null)
+            return null;
+
+
+        String templateFileName = getTemplateFileName(resourceJobManager.getResourceJobManagerType());
+        switch (resourceJobManager.getResourceJobManagerType()) {
+            case PBS:
+                return new PBSJobConfiguration(templateFileName, ".pbs", resourceJobManager.getJobManagerBinPath(),
+                        resourceJobManager.getJobManagerCommands(), new PBSOutputParser());
+            case SLURM:
+                return new SlurmJobConfiguration(templateFileName, ".slurm", resourceJobManager
+                        .getJobManagerBinPath(), resourceJobManager.getJobManagerCommands(), new SlurmOutputParser());
+            case LSF:
+                return new LSFJobConfiguration(templateFileName, ".lsf", resourceJobManager.getJobManagerBinPath(),
+                        resourceJobManager.getJobManagerCommands(), new LSFOutputParser());
+            case UGE:
+                return new UGEJobConfiguration(templateFileName, ".pbs", resourceJobManager.getJobManagerBinPath(),
+                        resourceJobManager.getJobManagerCommands(), new UGEOutputParser());
+            case FORK:
+                return new ForkJobConfiguration(templateFileName, ".sh", resourceJobManager.getJobManagerBinPath(),
+                        resourceJobManager.getJobManagerCommands(), new ForkOutputParser());
+            // We don't have a job configuration manager for CLOUD type
+            default:
+                return null;
+        }
+
+    }
+}
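
A sketch of how the factory above ties the pieces together for a SLURM resource. The
ResourceJobManager setters come from the thrift-generated app catalog model (assumed here, not
part of this hunk), and the paths are placeholders:

    import org.apache.airavata.helix.impl.task.submission.config.JobFactory;
    import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
    import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
    import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
    import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;

    public class JobFactoryExample {
        public static void main(String[] args) throws Exception {
            ResourceJobManager resourceJobManager = new ResourceJobManager();
            resourceJobManager.setResourceJobManagerType(ResourceJobManagerType.SLURM);
            resourceJobManager.setJobManagerBinPath("/usr/bin");

            // Resolves to a SlurmJobConfiguration backed by SLURM_Groovy.template.
            JobManagerConfiguration config = JobFactory.getJobManagerConfiguration(resourceJobManager);
            RawCommandInfo submit = config.getSubmitCommand("/scratch/user/job-1", "/scratch/user/job-1/job.slurm");
            System.out.println(submit.getCommand());
        }
    }
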
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobManagerConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobManagerConfiguration.java
new file mode 100644
index 0000000..1fafb00
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobManagerConfiguration.java
@@ -0,0 +1,29 @@
+package org.apache.airavata.helix.impl.task.submission.config;
+
+public interface JobManagerConfiguration {
+
+    public RawCommandInfo getCancelCommand(String jobID);
+
+    public String getJobDescriptionTemplateName();
+
+    public RawCommandInfo getMonitorCommand(String jobID);
+
+    public RawCommandInfo getUserBasedMonitorCommand(String userName);
+
+    public RawCommandInfo getJobIdMonitorCommand(String jobName , String userName);
+
+    public String getScriptExtension();
+
+    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath);
+
+    public OutputParser getParser();
+
+    public String getInstalledPath();
+
+    public String getBaseCancelCommand();
+
+    public String getBaseMonitorCommand();
+
+    public String getBaseSubmitCommand();
+
+}
\ No newline at end of file
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/OutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/OutputParser.java
new file mode 100644
index 0000000..41e8892
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/OutputParser.java
@@ -0,0 +1,41 @@
+package org.apache.airavata.helix.impl.task.submission.config;
+
+import org.apache.airavata.model.status.JobStatus;
+
+import java.util.Map;
+
+public interface OutputParser {
+
+    /**
+     * This can be used to parse the result of a job submission and extract the job ID.
+     * @param rawOutput
+     * @return the job id as a String, or null if no job id found
+     */
+    public String parseJobSubmission(String rawOutput) throws Exception;
+
+
+    /**
+     * Parses the output returned by the job submission command and identifies submission failures.
+     * @param rawOutput raw output returned by the submission command
+     * @return true if the job submission failed, false otherwise.
+     */
+    public boolean isJobSubmissionFailed(String rawOutput);
+
+
+    /**
+     * Extracts the status of the given job from the monitoring command output.
+     * @param jobID ID of the job to look up
+     * @param rawOutput raw output returned by the monitoring command
+     */
+    public JobStatus parseJobStatus(String jobID, String rawOutput) throws Exception;
+
+    /**
+     * Parses a combined monitoring output and extracts the statuses of multiple jobs.
+     * @param userName user whose jobs were queried
+     * @param statusMap map to populate; keys are of the form "<jobId>,<jobName>" and values are the parsed statuses
+     * @param rawOutput raw output returned by the monitoring command
+     */
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws Exception;
+
+
+    public String parseJobId(String jobName, String rawOutput) throws Exception;
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java
new file mode 100644
index 0000000..d7f9fb3
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java
@@ -0,0 +1,22 @@
+package org.apache.airavata.helix.impl.task.submission.config;
+
+public class RawCommandInfo {
+
+    private String rawCommand;
+
+    public RawCommandInfo(String cmd) {
+        this.rawCommand = cmd;
+    }
+
+    public String getCommand() {
+        return this.rawCommand;
+    }
+
+    public String getRawCommand() {
+        return rawCommand;
+    }
+
+    public void setRawCommand(String rawCommand) {
+        this.rawCommand = rawCommand;
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/ForkJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/ForkJobConfiguration.java
new file mode 100644
index 0000000..d25f17f
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/ForkJobConfiguration.java
@@ -0,0 +1,113 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp;
+
+import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
+import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+import java.util.Map;
+
+public class ForkJobConfiguration implements JobManagerConfiguration {
+    private final Map<JobManagerCommand, String> jobManagerCommands;
+    private String jobDescriptionTemplateName;
+    private String scriptExtension;
+    private String installedPath;
+    private OutputParser parser;
+
+    public ForkJobConfiguration (String jobDescriptionTemplateName, String scriptExtension, String installedPath,
+                                 Map<JobManagerCommand, String> jobManagerCommands, OutputParser parser){
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+        this.scriptExtension = scriptExtension;
+        this.parser = parser;
+	    installedPath = installedPath.trim();
+        if (installedPath.endsWith("/")) {
+            this.installedPath = installedPath;
+        } else {
+            this.installedPath = installedPath + "/";
+        }
+        this.jobManagerCommands = jobManagerCommands;
+    }
+
+    @Override
+    public RawCommandInfo getCancelCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + jobManagerCommands.get(JobManagerCommand.DELETION).trim() + " " +
+                jobID);
+    }
+
+    @Override
+    public String getJobDescriptionTemplateName() {
+        return jobDescriptionTemplateName;
+    }
+
+    @Override
+    public RawCommandInfo getMonitorCommand(String jobID) {
+        return null;
+    }
+
+    @Override
+    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+        return null;
+    }
+
+    @Override
+    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+        return null;
+    }
+
+    @Override
+    public String getScriptExtension() {
+        return scriptExtension;
+    }
+
+    @Override
+    public RawCommandInfo getSubmitCommand(String workingDirectory, String forkFilePath) {
+        return new RawCommandInfo(this.installedPath + jobManagerCommands.get(JobManagerCommand.SUBMISSION).trim() + " " +
+                workingDirectory + File.separator + FilenameUtils.getName(forkFilePath));
+    }
+
+    @Override
+    public OutputParser getParser() {
+        return parser;
+    }
+
+    @Override
+    public String getInstalledPath() {
+        return installedPath;
+    }
+
+    @Override
+    public String getBaseCancelCommand() {
+        return null;
+    }
+
+    @Override
+    public String getBaseMonitorCommand() {
+        return null;
+    }
+
+    @Override
+    public String getBaseSubmitCommand() {
+        return null;
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/JobUtil.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/JobUtil.java
new file mode 100644
index 0000000..36bce60
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/JobUtil.java
@@ -0,0 +1,58 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp;
+
+import org.apache.airavata.model.status.JobState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class JobUtil {
+	private static final Logger log = LoggerFactory.getLogger(JobUtil.class);
+
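+	// Maps raw scheduler status codes onto Airavata job states; the codes come from several
+	// schedulers, e.g. PBS "R" / LSF "RUN" -> ACTIVE, Slurm "PD" -> QUEUED, LSF "DONE" -> COMPLETE,
+	// Slurm "CA" -> CANCELED (see the cases below).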
+	public static JobState getJobState(String status) {
+		log.info("parsing the job status returned : " + status);
+		if (status != null) {
+			if ("C".equals(status) || "CD".equals(status) || "E".equals(status) || "CG".equals(status) || "DONE".equals(status)) {
+				return JobState.COMPLETE;
+//			} else if ("H".equals(status) || "h".equals(status)) {
+//				return JobState.HELD;
+			} else if ("Q".equals(status) || "qw".equals(status) || "PEND".equals(status)) {
+				return JobState.QUEUED;
+			} else if ("R".equals(status) || "CF".equals(status) || "r".equals(status) || "RUN".equals(status)) {
+				return JobState.ACTIVE;
+//			} else if ("T".equals(status)) {
+//				return JobState.HELD;
+			} else if ("W".equals(status) || "PD".equals(status)) {
+				return JobState.QUEUED;
+			} else if ("S".equals(status) || "PSUSP".equals(status) || "USUSP".equals(status) || "SSUSP".equals(status)) {
+				return JobState.SUSPENDED;
+			} else if ("CA".equals(status)) {
+				return JobState.CANCELED;
+			} else if ("F".equals(status) || "NF".equals(status) || "TO".equals(status) || "EXIT".equals(status)) {
+				return JobState.FAILED;
+			} else if ("PR".equals(status) || "Er".equals(status)) {
+				return JobState.FAILED;
+			} else if ("U".equals(status) || ("UNKWN".equals(status))) {
+				return JobState.UNKNOWN;
+			}
+		}
+		return JobState.UNKNOWN;
+	}
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/LSFJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/LSFJobConfiguration.java
new file mode 100644
index 0000000..bccd7ee
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/LSFJobConfiguration.java
@@ -0,0 +1,120 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp;
+
+import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
+import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
+import org.apache.commons.io.FilenameUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.Map;
+
+public class LSFJobConfiguration implements JobManagerConfiguration {
+    private final static Logger logger = LoggerFactory.getLogger(LSFJobConfiguration.class);
+	private final Map<JobManagerCommand, String> jobManagerCommands;
+    private String jobDescriptionTemplateName;
+    private String scriptExtension;
+    private String installedPath;
+    private OutputParser parser;
+
+    public LSFJobConfiguration(String jobDescriptionTemplateName,
+                               String scriptExtension, String installedPath, Map<JobManagerCommand, String>
+		                               jobManagerCommands, OutputParser parser) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+        this.scriptExtension = scriptExtension;
+        this.parser = parser;
+        if (installedPath.endsWith("/") || installedPath.isEmpty()) {
+            this.installedPath = installedPath;
+        } else {
+            this.installedPath = installedPath + "/";
+        }
+	    this.jobManagerCommands = jobManagerCommands;
+    }
+
+    @Override
+    public RawCommandInfo getCancelCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "bkill " + jobID);
+    }
+
+    @Override
+    public String getJobDescriptionTemplateName() {
+        return jobDescriptionTemplateName;
+    }
+
+    @Override
+    public RawCommandInfo getMonitorCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "bjobs " + jobID);
+    }
+
+    @Override
+    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+        return new RawCommandInfo(this.installedPath + "bjobs -u " + userName);
+    }
+
+    @Override
+    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+        return new RawCommandInfo(this.installedPath + "bjobs -J " + jobName);
+    }
+
+    @Override
+    public String getScriptExtension() {
+        return scriptExtension;
+    }
+
+    @Override
+    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
+        return new RawCommandInfo(this.installedPath + "bsub < " +
+                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
+    }
+
+    @Override
+    public OutputParser getParser() {
+        return parser;
+    }
+
+    public void setParser(OutputParser parser) {
+        this.parser = parser;
+    }
+
+    @Override
+    public String getInstalledPath() {
+        return installedPath;
+    }
+
+
+    @Override
+    public String getBaseCancelCommand() {
+        return "bkill";
+    }
+
+    @Override
+    public String getBaseMonitorCommand() {
+        return "bjobs";
+    }
+
+    @Override
+    public String getBaseSubmitCommand() {
+        return "bsub";
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/PBSJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/PBSJobConfiguration.java
new file mode 100644
index 0000000..aeedeb9
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/PBSJobConfiguration.java
@@ -0,0 +1,122 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp;
+
+import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
+import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+import java.util.Map;
+
+public class PBSJobConfiguration implements JobManagerConfiguration {
+
+	private final Map<JobManagerCommand, String> jobManagerCommands;
+	private String jobDescriptionTemplateName;
+	private String scriptExtension;
+	private String installedPath;
+	private OutputParser parser;
+
+	public PBSJobConfiguration(String jobDescriptionTemplateName, String scriptExtension, String installedPath,
+	                           Map<JobManagerCommand, String> jobManagerCommands, OutputParser parser) {
+		this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+		this.scriptExtension = scriptExtension;
+		this.parser = parser;
+		installedPath = installedPath.trim();
+		if (installedPath.endsWith("/")) {
+			this.installedPath = installedPath;
+		} else {
+			this.installedPath = installedPath + "/";
+		}
+		this.jobManagerCommands = jobManagerCommands;
+	}
+
+	public RawCommandInfo getCancelCommand(String jobID) {
+		return new RawCommandInfo(this.installedPath + jobManagerCommands.get(JobManagerCommand.DELETION).trim() + " " +
+				jobID);
+	}
+
+	public String getJobDescriptionTemplateName() {
+		return jobDescriptionTemplateName;
+	}
+
+	public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
+		this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+	}
+
+	public RawCommandInfo getMonitorCommand(String jobID) {
+		return new RawCommandInfo(this.installedPath + jobManagerCommands.get(JobManagerCommand.JOB_MONITORING).trim()
+				+ " -f " + jobID);
+	}
+
+	public String getScriptExtension() {
+		return scriptExtension;
+	}
+
+	public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
+		return new RawCommandInfo(this.installedPath + jobManagerCommands.get(JobManagerCommand.SUBMISSION).trim() + " " +
+				workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
+	}
+
+	public String getInstalledPath() {
+		return installedPath;
+	}
+
+	public void setInstalledPath(String installedPath) {
+		this.installedPath = installedPath;
+	}
+
+	public OutputParser getParser() {
+		return parser;
+	}
+
+	public void setParser(OutputParser parser) {
+		this.parser = parser;
+	}
+
+	public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+		return new RawCommandInfo(this.installedPath + jobManagerCommands.get(JobManagerCommand.JOB_MONITORING).trim()
+				+ " -u " + userName);
+	}
+
+	@Override
+	public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+		// For PBS there is no option to get jobDetails by JobName, so we search with userName
+		return new RawCommandInfo(this.installedPath + jobManagerCommands.get(JobManagerCommand.JOB_MONITORING).trim()
+				+ " -u " + userName + " -f  | grep \"Job_Name = " + jobName + "\" -B1");
+	}
+
+	@Override
+	public String getBaseCancelCommand() {
+		return jobManagerCommands.get(JobManagerCommand.DELETION).trim();
+	}
+
+	@Override
+	public String getBaseMonitorCommand() {
+		return jobManagerCommands.get(JobManagerCommand.JOB_MONITORING).trim();
+	}
+
+	@Override
+	public String getBaseSubmitCommand() {
+		return jobManagerCommands.get(JobManagerCommand.SUBMISSION).trim();
+	}
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/SlurmJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/SlurmJobConfiguration.java
new file mode 100644
index 0000000..fc431ce
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/SlurmJobConfiguration.java
@@ -0,0 +1,117 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp;
+
+import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
+import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+import java.util.Map;
+
+public class SlurmJobConfiguration implements JobManagerConfiguration {
+	private final Map<JobManagerCommand, String> jMCommands;
+    private String jobDescriptionTemplateName;
+    private String scriptExtension;
+    private String installedPath;
+    private OutputParser parser;
+
+    public SlurmJobConfiguration(String jobDescriptionTemplateName,
+                                 String scriptExtension, String installedPath, Map<JobManagerCommand, String>
+		                                 jobManagerCommands, OutputParser parser) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+        this.scriptExtension = scriptExtension;
+        this.parser = parser;
+	    installedPath = installedPath.trim();
+        if (installedPath.endsWith("/")) {
+            this.installedPath = installedPath;
+        } else {
+            this.installedPath = installedPath + "/";
+        }
+	    this.jMCommands = jobManagerCommands;
+    }
+
+    public RawCommandInfo getCancelCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + jMCommands.get(JobManagerCommand.DELETION).trim() + " " + jobID);
+    }
+
+    public String getJobDescriptionTemplateName() {
+        return jobDescriptionTemplateName;
+    }
+
+    public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+    }
+
+    public RawCommandInfo getMonitorCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + jMCommands.get(JobManagerCommand.JOB_MONITORING).trim() + " -j " + jobID);
+    }
+
+    public String getScriptExtension() {
+        return scriptExtension;
+    }
+
+    public RawCommandInfo getSubmitCommand(String workingDirectory,String pbsFilePath) {
+          return new RawCommandInfo(this.installedPath + jMCommands.get(JobManagerCommand.SUBMISSION).trim() + " " +
+                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
+    }
+
+    public String getInstalledPath() {
+        return installedPath;
+    }
+
+    public void setInstalledPath(String installedPath) {
+        this.installedPath = installedPath;
+    }
+
+    public OutputParser getParser() {
+        return parser;
+    }
+
+    public void setParser(OutputParser parser) {
+        this.parser = parser;
+    }
+
+    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+        return new RawCommandInfo(this.installedPath + jMCommands.get(JobManagerCommand.JOB_MONITORING).trim() + " -u " + userName);
+    }
+
+    @Override
+    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+        return new RawCommandInfo(this.installedPath + jMCommands.get(JobManagerCommand.JOB_MONITORING).trim() + " -n " + jobName + " -u " + userName);
+    }
+
+    @Override
+    public String getBaseCancelCommand() {
+	    return jMCommands.get(JobManagerCommand.DELETION).trim();
+    }
+
+    @Override
+    public String getBaseMonitorCommand() {
+        return jMCommands.get(JobManagerCommand.JOB_MONITORING).trim();
+    }
+
+    @Override
+    public String getBaseSubmitCommand() {
+        return jMCommands.get(JobManagerCommand.SUBMISSION).trim();
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/UGEJobConfiguration.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/UGEJobConfiguration.java
new file mode 100644
index 0000000..6a12966
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/UGEJobConfiguration.java
@@ -0,0 +1,117 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp;
+
+import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
+import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+import java.util.Map;
+
+public class UGEJobConfiguration implements JobManagerConfiguration {
+	private final Map<JobManagerCommand, String> jobManagerCommands;
+    private String jobDescriptionTemplateName;
+    private String scriptExtension;
+    private String installedPath;
+    private OutputParser parser;
+
+    public UGEJobConfiguration(String jobDescriptionTemplateName,
+                               String scriptExtension, String installedPath, Map<JobManagerCommand, String>
+		                               jobManagerCommands, OutputParser parser) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+        this.scriptExtension = scriptExtension;
+        this.parser = parser;
+        if (installedPath.endsWith("/")) {
+            this.installedPath = installedPath;
+        } else {
+            this.installedPath = installedPath + "/";
+        }
+	    this.jobManagerCommands = jobManagerCommands;
+    }
+
+    public RawCommandInfo getCancelCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "qdel " + jobID);
+    }
+
+    public String getJobDescriptionTemplateName() {
+        return jobDescriptionTemplateName;
+    }
+
+    public void setJobDescriptionTemplateName(String jobDescriptionTemplateName) {
+        this.jobDescriptionTemplateName = jobDescriptionTemplateName;
+    }
+
+    public RawCommandInfo getMonitorCommand(String jobID) {
+        return new RawCommandInfo(this.installedPath + "qstat -j " + jobID);
+    }
+
+    public String getScriptExtension() {
+        return scriptExtension;
+    }
+
+    public RawCommandInfo getSubmitCommand(String workingDirectory, String pbsFilePath) {
+        return new RawCommandInfo(this.installedPath + "qsub " +
+                workingDirectory + File.separator + FilenameUtils.getName(pbsFilePath));
+    }
+
+    public String getInstalledPath() {
+        return installedPath;
+    }
+
+    public void setInstalledPath(String installedPath) {
+        this.installedPath = installedPath;
+    }
+
+    public OutputParser getParser() {
+        return parser;
+    }
+
+    public void setParser(OutputParser parser) {
+        this.parser = parser;
+    }
+
+    public RawCommandInfo getUserBasedMonitorCommand(String userName) {
+        return new RawCommandInfo(this.installedPath + "qstat -u " + userName);
+    }
+
+    @Override
+    public RawCommandInfo getJobIdMonitorCommand(String jobName, String userName) {
+        // For UGE there is no option to get job details by job name, so we query by user name
+        return new RawCommandInfo(this.installedPath + "qstat -u " + userName);
+    }
+
+    @Override
+    public String  getBaseCancelCommand() {
+        return "qdel";
+    }
+
+    @Override
+    public String  getBaseMonitorCommand() {
+        return "qstat";
+    }
+
+    @Override
+    public String getBaseSubmitCommand() {
+        return "qsub ";
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/AiravataCustomCommandOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/AiravataCustomCommandOutputParser.java
new file mode 100644
index 0000000..c3a5a2e
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/AiravataCustomCommandOutputParser.java
@@ -0,0 +1,56 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+public class AiravataCustomCommandOutputParser implements OutputParser {
+    private static final Logger log = LoggerFactory.getLogger(AiravataCustomCommandOutputParser.class);
+
+    @Override
+    public String parseJobSubmission(String rawOutput) throws Exception {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public boolean isJobSubmissionFailed(String rawOutput) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public JobStatus parseJobStatus(String jobID, String rawOutput) throws Exception {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws Exception {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws Exception {
+        throw new UnsupportedOperationException();
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/ForkOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/ForkOutputParser.java
new file mode 100644
index 0000000..a4f48cc
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/ForkOutputParser.java
@@ -0,0 +1,58 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+
+import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+public class ForkOutputParser implements OutputParser {
+    private static final Logger log = LoggerFactory.getLogger(ForkOutputParser.class);
+
+    @Override
+    public String parseJobSubmission(String rawOutput) throws Exception {
+	    return AiravataUtils.getId("JOB_ID_");
+    }
+
+    @Override
+    public boolean isJobSubmissionFailed(String rawOutput) {
+        return false;
+    }
+
+    @Override
+    public JobStatus parseJobStatus(String jobID, String rawOutput) throws Exception {
+        return null;
+    }
+
+    @Override
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws Exception {
+
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws Exception {
+        // For fork jobs there is no job ID, hence airavata generates a job ID
+        return AiravataUtils.getId(jobName);
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/LSFOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/LSFOutputParser.java
new file mode 100644
index 0000000..0bf812f
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/LSFOutputParser.java
@@ -0,0 +1,132 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class LSFOutputParser implements OutputParser {
+    private final static Logger logger = LoggerFactory.getLogger(LSFOutputParser.class);
+
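+    // bsub prints e.g. "Job <2477982> is submitted to queue <short>."; the job ID is taken from
+    // between the angle brackets (see the main() example at the bottom of this class).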
+    @Override
+    public String parseJobSubmission(String rawOutput) throws Exception {
+        logger.debug(rawOutput);
+        if (rawOutput.indexOf("<") >= 0) {
+            return rawOutput.substring(rawOutput.indexOf("<")+1,rawOutput.indexOf(">"));
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public boolean isJobSubmissionFailed(String rawOutput) {
+        return false;
+    }
+
+    @Override
+    public JobStatus parseJobStatus(String jobID, String rawOutput) throws Exception {
+        logger.debug(rawOutput);
+        // TODO: this method is not used anymore
+        return null;
+    }
+
+    @Override
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws Exception {
+        logger.debug(rawOutput);
+
+        String[] info = rawOutput.split("\n");
+//        int lastStop = 0;
+        for (String jobID : statusMap.keySet()) {
+            String jobName = jobID.split(",")[1];
+            boolean found = false;
+            for (int i = 0; i < info.length; i++) {
+                if (info[i].contains(jobName.substring(0,8))) {
+                    // now starts processing this line
+                    logger.info(info[i]);
+                    String correctLine = info[i];
+                    String[] columns = correctLine.split(" ");
+                    List<String> columnList = new ArrayList<String>();
+                    for (String s : columns) {
+                        if (!"".equals(s)) {
+                            columnList.add(s);
+                        }
+                    }
+//                    lastStop = i + 1;
+                    try {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf(columnList.get(2))));
+                    }catch(IndexOutOfBoundsException e) {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf("U")));
+                    }
+                    found = true;
+                    break;
+                }
+            }
+            if (!found)
+                logger.error("Couldn't find the status of the job with job name: " + jobName + ", job Id: " + jobID.split(",")[0]);
+        }
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws Exception {
+        String regJobId = "jobId";
+        Pattern pattern = Pattern.compile("(?=(?<" + regJobId + ">\\d+)\\s+\\w+\\s+" + jobName + ")"); // regex - look ahead and match
+        if (rawOutput != null) {
+            Matcher matcher = pattern.matcher(rawOutput);
+            if (matcher.find()) {
+                return matcher.group(regJobId);
+            } else {
+                logger.error("No match is found for JobName");
+                return null;
+            }
+        } else {
+            logger.error("Error: RawOutput shouldn't be null");
+            return null;
+        }
+    }
+
+    public static void main(String[] args) {
+        String test = "Job <2477982> is submitted to queue <short>.";
+        System.out.println(test.substring(test.indexOf("<")+1, test.indexOf(">")));
+        String test1 = "JOBID   USER    STAT  QUEUE      FROM_HOST   EXEC_HOST   JOB_NAME   SUBMIT_TIME\n" +
+                "2636607 lg11w   RUN   long       ghpcc06     c11b02      *069656647 Mar  7 00:58\n" +
+                "2636582 lg11w   RUN   long       ghpcc06     c02b01      2134490944 Mar  7 00:48";
+        Map<String, JobStatus> statusMap = new HashMap<String, JobStatus>();
+        statusMap.put("2477983,2134490944", new JobStatus(JobState.UNKNOWN));
+        LSFOutputParser lsfOutputParser = new LSFOutputParser();
+        try {
+            lsfOutputParser.parseJobStatuses("cjh", statusMap, test1);
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+        System.out.println(statusMap.get("2477983,2134490944"));
+
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/PBSOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/PBSOutputParser.java
new file mode 100644
index 0000000..3be8c8a
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/PBSOutputParser.java
@@ -0,0 +1,142 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.helix.impl.task.submission.config.imp.JobUtil;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class PBSOutputParser implements OutputParser {
+    private static final Logger log = LoggerFactory.getLogger(PBSOutputParser.class);
+
+    public String parseJobSubmission(String rawOutput) {
+        log.debug(rawOutput);
+        String jobId = rawOutput;
+        if (!rawOutput.isEmpty() && rawOutput.contains("\n")){
+            String[] split = rawOutput.split("\n");
+            if (split.length != 0){
+                jobId = split[0];
+            }
+        }
+        return jobId;  //In PBS stdout is going to be directly the jobID
+    }
+
+    @Override
+    public boolean isJobSubmissionFailed(String rawOutput) {
+        return false;
+    }
+
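+    // Parses "qstat -f <jobId>"-style output: locate the "Job Id:" block that matches the given ID,
+    // then read its "job_state = X" line and map X through JobUtil.getJobState().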
+    public JobStatus parseJobStatus(String jobID, String rawOutput) {
+        boolean jobFound = false;
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+        String[] line = null;
+        int index = 0;
+        for (String anInfo : info) {
+            index++;
+            if (anInfo.contains("Job Id:")) {
+                if (anInfo.contains(jobID)) {
+                    jobFound = true;
+                    break;
+                }
+            }
+        }
+        if (jobFound) {
+            for (int i=index;i<info.length;i++) {
+                String anInfo = info[i];
+                if (anInfo.contains("=")) {
+                    line = anInfo.split("=", 2);
+                    if (line.length != 0) {
+                        if (line[0].contains("job_state")) {
+	                        return new JobStatus(JobUtil.getJobState(line[1].replaceAll(" ", "")));
+                        }
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) {
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+//        int lastStop = 0;
+        for (String jobID : statusMap.keySet()) {
+            String jobName = jobID.split(",")[1];
+            boolean found = false;
+            for (int i = 0; i < info.length; i++) {
+                if (info[i].contains(jobName.substring(0,8))) {
+                    // now starts processing this line
+                    log.info(info[i]);
+                    String correctLine = info[i];
+                    String[] columns = correctLine.split(" ");
+                    List<String> columnList = new ArrayList<String>();
+                    for (String s : columns) {
+                        if (!"".equals(s)) {
+                            columnList.add(s);
+                        }
+                    }
+//                    lastStop = i + 1;
+                    try {
+	                    statusMap.put(jobID, new JobStatus(JobUtil.getJobState(columnList.get(9))));
+                    }catch(IndexOutOfBoundsException e) {
+	                    statusMap.put(jobID, new JobStatus(JobUtil.getJobState("U")));
+                    }
+                    found = true;
+                    break;
+                }
+            }
+            if (!found)
+                log.error("Couldn't find the status of the job with job name: " + jobName + ", job Id: " + jobID.split(",")[0]);
+        }
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws Exception {
+        /* output will look like
+        Job Id: 2080802.gordon-fe2.local
+            Job_Name = A312402627
+        */
+        String regJobId = "jobId";
+        Pattern pattern = Pattern.compile("(?<" + regJobId + ">[^\\s]*)\\s*.* " + jobName);
+        if (rawOutput != null) {
+            Matcher matcher = pattern.matcher(rawOutput);
+            if (matcher.find()) {
+                return matcher.group(regJobId);
+            } else {
+                log.error("No match is found for JobName");
+                return null;
+            }
+        } else {
+            log.error("Error: RawOutput shouldn't be null");
+            return null;
+        }
+    }
+
+
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/SlurmOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/SlurmOutputParser.java
new file mode 100644
index 0000000..3ebbcfd
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/SlurmOutputParser.java
@@ -0,0 +1,137 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.helix.impl.task.submission.config.imp.JobUtil;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class SlurmOutputParser implements OutputParser {
+    private static final Logger log = LoggerFactory.getLogger(SlurmOutputParser.class);
+    public static final int JOB_NAME_OUTPUT_LENGTH = 8;
+    public static final String STATUS = "status";
+	public static final String JOBID = "jobId";
+
+
+    /**
+     * Parses the output of sbatch and extracts the job ID from it.
+     *
+     * @param rawOutput raw output returned by sbatch
+     * @return the job ID, or an empty string if none is found
+     */
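+    // e.g. (illustrative) "Submitted batch job 2636607" -> "2636607"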
+    public String parseJobSubmission(String rawOutput) throws Exception {
+	    log.info(rawOutput);
+	    Pattern pattern = Pattern.compile("Submitted batch job (?<" + JOBID + ">[^\\s]*)");
+	    Matcher matcher = pattern.matcher(rawOutput);
+	    if (matcher.find()) {
+		    return matcher.group(JOBID);
+	    }
+	    return "";
+    }
+
+    @Override
+    public boolean isJobSubmissionFailed(String rawOutput) {
+        Pattern pattern = Pattern.compile("FAILED");
+        Matcher matcher = pattern.matcher(rawOutput);
+        return matcher.find();
+    }
+
+    public JobStatus parseJobStatus(String jobID, String rawOutput) throws Exception {
+        log.info(rawOutput);
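+        // squeue-style line: JOBID PARTITION NAME USER ST ...; the look-ahead skips the three
+        // columns after the job ID and captures the fifth column (ST) as the raw status code.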
+        Pattern pattern = Pattern.compile(jobID + "(?=\\s+\\S+\\s+\\S+\\s+\\S+\\s+(?<" + STATUS + ">\\w+))");
+        Matcher matcher = pattern.matcher(rawOutput);
+        if (matcher.find()) {
+	        return new JobStatus(JobUtil.getJobState(matcher.group(STATUS)));
+        }
+        return null;
+    }
+
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) throws Exception {
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+        String lastString = info[info.length - 1];
+        if (lastString.contains("JOBID") || lastString.contains("PARTITION")) {
+            log.info("There are no jobs with this username ... ");
+            return;
+        }
+//        int lastStop = 0;
+        for (String jobID : statusMap.keySet()) {
+            String jobId = jobID.split(",")[0];
+            String jobName = jobID.split(",")[1];
+            boolean found = false;
+            for (int i = 0; i < info.length; i++) {
+                if (info[i].contains(jobName.substring(0, 8))) {
+                    // now starts processing this line
+                    log.info(info[i]);
+                    String correctLine = info[i];
+                    String[] columns = correctLine.split(" ");
+                    List<String> columnList = new ArrayList<String>();
+                    for (String s : columns) {
+                        if (!"".equals(s)) {
+                            columnList.add(s);
+                        }
+                    }
+                    try {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf(columnList.get(4))));
+                    } catch (IndexOutOfBoundsException e) {
+	                    statusMap.put(jobID, new JobStatus(JobState.valueOf("U")));
+                    }
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                log.error("Couldn't find the status of the Job with JobName: " + jobName + "Job Id: " + jobId);
+            }
+        }
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws Exception {
+        String regJobId = "jobId";
+        if (jobName == null) {
+            return null;
+        } else if(jobName.length() > JOB_NAME_OUTPUT_LENGTH) {
+            jobName = jobName.substring(0, JOB_NAME_OUTPUT_LENGTH);
+        }
+        Pattern pattern = Pattern.compile("(?=(?<" + regJobId + ">\\d+)\\s+\\w+\\s+" + jobName + ")"); // regex - look ahead and match
+        if (rawOutput != null) {
+            Matcher matcher = pattern.matcher(rawOutput);
+            if (matcher.find()) {
+                return matcher.group(regJobId);
+            } else {
+                log.error("No match is found for JobName");
+                return null;
+            }
+        } else {
+            log.error("Error: RawOutput shouldn't be null");
+            return null;
+        }
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/UGEOutputParser.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/UGEOutputParser.java
new file mode 100644
index 0000000..0f457ff
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/imp/parser/UGEOutputParser.java
@@ -0,0 +1,108 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.helix.impl.task.submission.config.imp.parser;
+
+import org.apache.airavata.helix.impl.task.submission.config.OutputParser;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class UGEOutputParser implements OutputParser {
+    private static final Logger log = LoggerFactory.getLogger(UGEOutputParser.class);
+    public static final String JOB_ID = "jobId";
+
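+	// qsub on UGE/SGE typically prints e.g. "Your job 2147 ("jobname") has been submitted";
+	// the job ID is then the third whitespace-separated token of the last output line.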
+	public String parseJobSubmission(String rawOutput) {
+		log.debug(rawOutput);
+		if (rawOutput != null && !rawOutput.isEmpty() && !isJobSubmissionFailed(rawOutput)) {
+			String[] info = rawOutput.split("\n");
+			String lastLine = info[info.length - 1];
+			return lastLine.split(" ")[2]; // In PBS stdout is going to be directly the jobID
+		} else {
+			return "";
+		}
+	}
+
+    @Override
+    public boolean isJobSubmissionFailed(String rawOutput) {
+        Pattern pattern = Pattern.compile("Rejecting");
+        Matcher matcher = pattern.matcher(rawOutput);
+        return matcher.find();
+    }
+
+    public JobStatus parseJobStatus(String jobID, String rawOutput) {
+        Pattern pattern = Pattern.compile("job_number:[\\s]+" + jobID);
+        Matcher matcher = pattern.matcher(rawOutput);
+        if (matcher.find()) {
+	        return new JobStatus(JobState.QUEUED); // fixme; return correct status.
+        }
+	    return new JobStatus(JobState.UNKNOWN);
+    }
+
+    public void parseJobStatuses(String userName, Map<String, JobStatus> statusMap, String rawOutput) {
+        log.debug(rawOutput);
+        String[] info = rawOutput.split("\n");
+        int lastStop = 0;
+        for (String jobID : statusMap.keySet()) {
+            for(int i=lastStop;i<info.length;i++){
+               if(jobID.split(",")[0].contains(info[i].split(" ")[0]) && !"".equals(info[i].split(" ")[0])){
+                   // now starts processing this line
+                   log.info(info[i]);
+                   String correctLine = info[i];
+                   String[] columns = correctLine.split(" ");
+                   List<String> columnList = new ArrayList<String>();
+                   for (String s : columns) {
+                       if (!"".equals(s)) {
+                           columnList.add(s);
+                       }
+                   }
+                   lastStop = i+1;
+                   if ("E".equals(columnList.get(4))) {
+                       // Another UGE status also uses the letter "E", so remap it to "Er"
+                       // to make sure it resolves to the failed/error state in JobUtil.getJobState()
+                       columnList.set(4, "Er");
+                   }
+	               statusMap.put(jobID, new JobStatus(JobState.valueOf(columnList.get(4))));
+	               break;
+               }
+            }
+        }
+    }
+
+    @Override
+    public String parseJobId(String jobName, String rawOutput) throws Exception {
+        if (jobName.length() > 10) {
+            jobName = jobName.substring(0, 10);
+        }
+        Pattern pattern = Pattern.compile("(?<" + JOB_ID + ">\\S+)\\s+\\S+\\s+(" + jobName + ")");
+        Matcher matcher = pattern.matcher(rawOutput);
+        if (matcher.find()) {
+            return matcher.group(JOB_ID);
+        }
+        return null;
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
new file mode 100644
index 0000000..fb9917f
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
@@ -0,0 +1,232 @@
+package org.apache.airavata.helix.impl.task.submission.task;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.JobSubmissionOutput;
+import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
+import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
+import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.airavata.model.commons.ErrorModel;
+import org.apache.airavata.model.experiment.ExperimentModel;
+import org.apache.airavata.model.job.JobModel;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.apache.airavata.model.status.TaskState;
+import org.apache.airavata.model.status.TaskStatus;
+import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
+import org.apache.commons.io.FileUtils;
+import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+@TaskDef(name = "Default Job Submission")
+public class DefaultJobSubmissionTask extends JobSubmissionTask {
+
+    private static final Logger logger = LogManager.getLogger(DefaultJobSubmissionTask.class);
+
+    public static final String DEFAULT_JOB_ID = "DEFAULT_JOB_ID";
+
+    @Override
+    public TaskResult onRun(TaskHelper taskHelper) {
+        try {
+            GroovyMapData groovyMapData = new GroovyMapData();
+
+
+            JobModel jobModel = new JobModel();
+            jobModel.setProcessId(getProcessId());
+            jobModel.setWorkingDir(groovyMapData.getWorkingDirectory());
+            jobModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
+            jobModel.setTaskId(getTaskId());
+            jobModel.setJobName(groovyMapData.getJobName());
+
+            File jobFile = SubmissionUtil.createJobFile(groovyMapData);
+
+
+            if (jobFile != null && jobFile.exists()) {
+                jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
+                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(getComputeResourceId(),
+                        getJobSubmissionProtocol().name(), getComputeResourceCredentialToken());
+
+                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, jobFile, groovyMapData.getWorkingDirectory());
+
+                jobModel.setExitCode(submissionOutput.getExitCode());
+                jobModel.setStdErr(submissionOutput.getStdErr());
+                jobModel.setStdOut(submissionOutput.getStdOut());
+
+                String jobId = submissionOutput.getJobId();
+
+                if (submissionOutput.getExitCode() != 0 || submissionOutput.isJobSubmissionFailed()) {
+                    jobModel.setJobId(DEFAULT_JOB_ID);
+                    if (submissionOutput.isJobSubmissionFailed()) {
+                        List<JobStatus> statusList = new ArrayList<>();
+                        statusList.add(new JobStatus(JobState.FAILED));
+                        statusList.get(0).setReason(submissionOutput.getFailureReason());
+                        jobModel.setJobStatuses(statusList);
+                        saveJobModel(jobModel);
+                        logger.error("expId: " + getExperimentId() + ", processid: " + getProcessId()+ ", taskId: " +
+                                getTaskId() + " :- Job submission failed for job name " + jobModel.getJobName());
+
+                        ErrorModel errorModel = new ErrorModel();
+                        errorModel.setUserFriendlyMessage(submissionOutput.getFailureReason());
+                        errorModel.setActualErrorMessage(submissionOutput.getFailureReason());
+                        saveExperimentError(errorModel);
+                        saveProcessError(errorModel);
+                        saveTaskError(errorModel);
+                        //taskStatus.setState(TaskState.FAILED);
+                        //taskStatus.setReason("Job submission command didn't return a jobId");
+                        //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                        //taskContext.setTaskStatus(taskStatus);
+                        return onFail("Job submission command didn't return a jobId", false, null);
+
+                    } else {
+                        String msg;
+                        saveJobModel(jobModel);
+                        ErrorModel errorModel = new ErrorModel();
+                        if (submissionOutput.getExitCode() != Integer.MIN_VALUE) {
+                            msg = "expId:" + getExperimentId() + ", processId:" + getProcessId() + ", taskId: " + getTaskId() +
+                                    " returned non-zero exit code:" + submissionOutput.getExitCode() + " for JobName:" + jobModel.getJobName() +
+                                    ", with failure reason: " + submissionOutput.getFailureReason() +
+                                    ". Hence changing job state to Failed.";
+                            errorModel.setActualErrorMessage(submissionOutput.getFailureReason());
+                        } else {
+                            msg = "expId:" + getExperimentId() + ", processId:" + getProcessId() + ", taskId: " + getTaskId() +
+                                    " didn't return a valid job submission exit code for JobName:" + jobModel.getJobName() +
+                                    ", with failure reason: stdout -> " + submissionOutput.getStdOut() +
+                                    ", stderr -> " + submissionOutput.getStdErr() + ". Hence changing job state to Failed.";
+                            errorModel.setActualErrorMessage(msg);
+                        }
+                        logger.error(msg);
+                        errorModel.setUserFriendlyMessage(msg);
+                        saveExperimentError(errorModel);
+                        saveProcessError(errorModel);
+                        saveTaskError(errorModel);
+                        //taskStatus.setState(TaskState.FAILED);
+                        //taskStatus.setReason(msg);
+                        //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                        //taskContext.setTaskStatus(taskStatus);
+                        return onFail(msg, false, null);
+                    }
+
+                    //TODO save task status??
+                } else if (jobId != null && !jobId.isEmpty()) {
+                    jobModel.setJobId(jobId);
+                    saveJobModel(jobModel);
+                    JobStatus jobStatus = new JobStatus();
+                    jobStatus.setJobState(JobState.SUBMITTED);
+                    jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
+                    jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                    jobModel.setJobStatuses(Arrays.asList(jobStatus));
+                    saveJobStatus(jobModel);
+
+                    if (verifyJobSubmissionByJobId(adaptor, jobId)) {
+                        jobStatus.setJobState(JobState.QUEUED);
+                        jobStatus.setReason("Verification step succeeded");
+                        jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                        jobModel.setJobStatuses(Arrays.asList(jobStatus));
+                        saveJobStatus(jobModel);
+                    }
+
+                    if (getComputeResourceDescription().isGatewayUsageReporting()){
+                        String loadCommand = getComputeResourceDescription().getGatewayUsageModuleLoadCommand();
+                        String usageExecutable = getComputeResourceDescription().getGatewayUsageExecutable();
+                        ExperimentModel experiment = (ExperimentModel)getExperimentCatalog().get(ExperimentCatalogModelType.EXPERIMENT, getExperimentId());
+                        String username = experiment.getUserName() + "@" + getGatewayComputeResourcePreference().getUsageReportingGatewayId();
+                        RawCommandInfo rawCommandInfo = new RawCommandInfo(loadCommand + " && " + usageExecutable + " -gateway_user " +  username  +
+                                " -submit_time \"`date '+%F %T %:z'`\"  -jobid " + jobId );
+                        adaptor.executeCommand(rawCommandInfo.getRawCommand(), null);
+                    }
+                    //taskStatus = new TaskStatus(TaskState.COMPLETED);
+                    //taskStatus.setReason("Submitted job to compute resource");
+                    //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+
+                    return onSuccess("Submitted job to compute resource");
+                } else {
+                    int verificationTryCount = 0;
+                    while (verificationTryCount++ < 3) {
+                        String verifyJobId = verifyJobSubmission(adaptor, jobModel.getJobName(), getComputeResourceLoginUserName());
+                        if (verifyJobId != null && !verifyJobId.isEmpty()) {
+                            // JobStatus either changed from SUBMITTED to QUEUED or directly to QUEUED
+                            jobId = verifyJobId;
+                            jobModel.setJobId(jobId);
+                            saveJobModel(jobModel);
+                            JobStatus jobStatus = new JobStatus();
+                            jobStatus.setJobState(JobState.QUEUED);
+                            jobStatus.setReason("Verification step succeeded");
+                            jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                            jobModel.setJobStatuses(Arrays.asList(jobStatus));
+                            saveJobStatus(jobModel);
+                            //taskStatus.setState(TaskState.COMPLETED);
+                            //taskStatus.setReason("Submitted job to compute resource");
+                            //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                            break;
+                        }
+                        logger.info("Verification step returned an invalid jobId; retrying in " + (verificationTryCount * 10) + " secs");
+                        Thread.sleep(verificationTryCount * 10000);
+                    }
+                }
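+                // If the submit command didn't echo a job id, the loop above retried a name-based
+                // lookup up to 3 times, backing off 10s/20s/30s between attempts, before giving up.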
+
+                if (jobId == null || jobId.isEmpty()) {
+                    jobModel.setJobId(DEFAULT_JOB_ID);
+                    saveJobModel(jobModel);
+                    String msg = "expId:" + getExperimentId() + " Couldn't find " +
+                            "remote jobId for JobName:" + jobModel.getJobName() + "; neither the submit nor the verify step " +
+                            "returned a valid JobId. Hence changing experiment state to Failed";
+                    logger.error(msg);
+                    ErrorModel errorModel = new ErrorModel();
+                    errorModel.setUserFriendlyMessage(msg);
+                    errorModel.setActualErrorMessage(msg);
+                    saveExperimentError(errorModel);
+                    saveProcessError(errorModel);
+                    saveTaskError(errorModel);
+                    //taskStatus.setState(TaskState.FAILED);
+                    //taskStatus.setReason("Couldn't find job id in both submitted and verified steps");
+                    //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                    return onFail("Couldn't find job id in both submitted and verified steps", false, null);
+                } else {
+                    // A jobId was recovered in the verification step above, so the submission is successful.
+                    return onSuccess("Submitted job to compute resource");
+                }
+
+            }  else {
+                //taskStatus.setState(TaskState.FAILED);
+                if (jobFile == null) {
+                    return onFail("Job file is null", true, null);
+                  //  taskStatus.setReason("JobFile is null");
+                } else {
+                    //taskStatus.setReason("Job file doesn't exist");
+                    return onFail("Job file doesn't exist", true, null);
+                }
+            }
+        } catch (Exception e) {
+            logger.error("Task failed due to an unexpected issue", e);
+            return onFail("Task failed due to unexpected issue", false, null);
+        }
+    }
+
+    private boolean verifyJobSubmissionByJobId(AgentAdaptor agentAdaptor, String jobID) throws Exception {
+        JobStatus status = getJobStatus(agentAdaptor, jobID);
+        return status != null &&  status.getJobState() != JobState.UNKNOWN;
+    }
+
+    private String verifyJobSubmission(AgentAdaptor agentAdaptor, String jobName, String userName) {
+        String jobId = null;
+        try {
+            jobId  = getJobIdByJobName(agentAdaptor, jobName, userName);
+        } catch (Exception e) {
+            logger.error("Error while verifying JobId from JobName " + jobName);
+        }
+        return jobId;
+    }
+
+    @Override
+    public void onCancel() {
+
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
new file mode 100644
index 0000000..da04365
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
@@ -0,0 +1,79 @@
+package org.apache.airavata.helix.impl.task.submission.task;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.JobSubmissionOutput;
+import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
+import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.airavata.model.job.JobModel;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.apache.commons.io.FileUtils;
+import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.util.Arrays;
+
+@TaskDef(name = "Fork Job Submission")
+public class ForkJobSubmissionTask extends JobSubmissionTask {
+
+    private static final Logger logger = LogManager.getLogger(ForkJobSubmissionTask.class);
+
+    @Override
+    public TaskResult onRun(TaskHelper taskHelper) {
+
+        try {
+            GroovyMapData groovyMapData = new GroovyMapData();
+
+            JobModel jobModel = new JobModel();
+            jobModel.setProcessId(getProcessId());
+            jobModel.setWorkingDir(groovyMapData.getWorkingDirectory());
+            jobModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
+            jobModel.setTaskId(getTaskId());
+            jobModel.setJobName(groovyMapData.getJobName());
+
+            File jobFile = SubmissionUtil.createJobFile(groovyMapData);
+
+            if (jobFile != null && jobFile.exists()) {
+                jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
+                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(getComputeResourceId(),
+                        getJobSubmissionProtocol().name(), getComputeResourceCredentialToken());
+
+                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, jobFile, groovyMapData.getWorkingDirectory());
+
+                jobModel.setExitCode(submissionOutput.getExitCode());
+                jobModel.setStdErr(submissionOutput.getStdErr());
+                jobModel.setStdOut(submissionOutput.getStdOut());
+
+                String jobId = submissionOutput.getJobId();
+
+                if (jobId != null && !jobId.isEmpty()) {
+                    jobModel.setJobId(jobId);
+                    saveJobModel(jobModel);
+                    JobStatus jobStatus = new JobStatus();
+                    jobStatus.setJobState(JobState.SUBMITTED);
+                    jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
+                    jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                    jobModel.setJobStatuses(Arrays.asList(jobStatus));
+                    saveJobStatus(jobModel);
+
+                    return null;
+                } else {
+                    String msg = "expId:" + getExperimentId() + " Couldn't find remote jobId for JobName:" +
+                            jobModel.getJobName() + "; the submit command didn't return a valid JobId. " +
+                            "Hence changing experiment state to Failed";
+                    logger.error(msg);
+                }
+
+            }
+            return null;
+
+        } catch (Exception e) {
+            logger.error("Fork job submission task failed due to an unexpected issue", e);
+            return null;
+        }
+    }
+
+    @Override
+    public void onCancel() {
+
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
new file mode 100644
index 0000000..fe5a3dc
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
@@ -0,0 +1,202 @@
+package org.apache.airavata.helix.impl.task.submission.task;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.CommandOutput;
+import org.apache.airavata.agents.api.JobSubmissionOutput;
+import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.AiravataTask;
+import org.apache.airavata.helix.impl.task.submission.config.JobFactory;
+import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
+import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
+import org.apache.airavata.messaging.core.MessageContext;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
+import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
+import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
+import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
+import org.apache.airavata.model.commons.ErrorModel;
+import org.apache.airavata.model.job.JobModel;
+import org.apache.airavata.model.messaging.event.JobIdentifier;
+import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
+import org.apache.airavata.model.messaging.event.MessageType;
+import org.apache.airavata.model.status.JobStatus;
+import org.apache.airavata.registry.cpi.*;
+import org.apache.helix.HelixManager;
+
+import java.io.File;
+import java.util.*;
+
+public abstract class JobSubmissionTask extends AiravataTask {
+
+    @Override
+    public void init(HelixManager manager, String workflowName, String jobName, String taskName) {
+        super.init(manager, workflowName, jobName, taskName);
+    }
+
+    //////////////////////
+    protected JobSubmissionOutput submitBatchJob(AgentAdaptor agentAdaptor, File jobFile, String workingDirectory) throws Exception {
+        JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
+                getAppCatalog(), getJobSubmissionProtocol(), getPreferredJobSubmissionInterface()));
+        RawCommandInfo submitCommand = jobManagerConfiguration.getSubmitCommand(workingDirectory, jobFile.getPath());
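+        // The resolved command depends on the resource job manager of the target cluster; for a
+        // PBS-style manager it would look roughly like "cd <workingDirectory>; qsub <jobFile>"
+        // (illustrative only, the exact form comes from the JobManagerConfiguration).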
+        CommandOutput commandOutput = agentAdaptor.executeCommand(submitCommand.getRawCommand(), workingDirectory);
+
+        JobSubmissionOutput jsoutput = new JobSubmissionOutput();
+
+        jsoutput.setJobId(jobManagerConfiguration.getParser().parseJobSubmission(commandOutput.getStdOut()));
+        if (jsoutput.getJobId() == null || jsoutput.getJobId().isEmpty()) {
+            if (jobManagerConfiguration.getParser().isJobSubmissionFailed(commandOutput.getStdOut())) {
+                jsoutput.setJobSubmissionFailed(true);
+                jsoutput.setFailureReason("stdout : " + commandOutput.getStdOut() +
+                        "\n stderr : " + commandOutput.getStdError());
+            }
+        }
+        jsoutput.setExitCode(commandOutput.getExitCode());
+        if (jsoutput.getExitCode() != 0) {
+            jsoutput.setJobSubmissionFailed(true);
+            jsoutput.setFailureReason("stdout : " + commandOutput.getStdOut() +
+                    "\n stderr : " + commandOutput.getStdError());
+        }
+        jsoutput.setStdOut(commandOutput.getStdOut());
+        jsoutput.setStdErr(commandOutput.getStdError());
+        return jsoutput;
+
+    }
+
+    public JobStatus getJobStatus(AgentAdaptor agentAdaptor, String jobID) throws Exception {
+        JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
+                getAppCatalog(), getJobSubmissionProtocol(), getPreferredJobSubmissionInterface()));
+        CommandOutput commandOutput = agentAdaptor.executeCommand(jobManagerConfiguration.getMonitorCommand(jobID).getRawCommand(), null);
+
+        return jobManagerConfiguration.getParser().parseJobStatus(jobID, commandOutput.getStdOut());
+
+    }
+
+    public String getJobIdByJobName(AgentAdaptor agentAdaptor, String jobName, String userName) throws Exception {
+        JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
+                getAppCatalog(), getJobSubmissionProtocol(), getPreferredJobSubmissionInterface()));
+
+        RawCommandInfo jobIdMonitorCommand = jobManagerConfiguration.getJobIdMonitorCommand(jobName, userName);
+        CommandOutput commandOutput = agentAdaptor.executeCommand(jobIdMonitorCommand.getRawCommand(), null);
+        return jobManagerConfiguration.getParser().parseJobId(jobName, commandOutput.getStdOut());
+    }
+
+    ////////////////////////////////
+
+
+    /////////////////////////////////////////////
+    public void saveExperimentError(ErrorModel errorModel) throws Exception {
+        try {
+            errorModel.setErrorId(AiravataUtils.getId("EXP_ERROR"));
+            getExperimentCatalog().add(ExpCatChildDataType.EXPERIMENT_ERROR, errorModel, getExperimentId());
+        } catch (RegistryException e) {
+            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId()
+                    + " : - Error while updating experiment errors";
+            throw new Exception(msg, e);
+        }
+    }
+
+    public void saveProcessError(ErrorModel errorModel) throws Exception {
+        try {
+            errorModel.setErrorId(AiravataUtils.getId("PROCESS_ERROR"));
+            getExperimentCatalog().add(ExpCatChildDataType.PROCESS_ERROR, errorModel, getProcessId());
+        } catch (RegistryException e) {
+            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId()
+                    + " : - Error while updating process errors";
+            throw new Exception(msg, e);
+        }
+    }
+
+    public void saveTaskError(ErrorModel errorModel) throws Exception {
+        try {
+            errorModel.setErrorId(AiravataUtils.getId("TASK_ERROR"));
+            getExperimentCatalog().add(ExpCatChildDataType.TASK_ERROR, errorModel, getTaskId());
+        } catch (RegistryException e) {
+            String msg = "expId: " + getExperimentId() + " processId: " + getProcessId() + " taskId: " + getTaskId()
+                    + " : - Error while updating task errors";
+            throw new Exception(msg, e);
+        }
+    }
+
+    public void saveJobModel(JobModel jobModel) throws RegistryException {
+        getExperimentCatalog().add(ExpCatChildDataType.JOB, jobModel, getProcessId());
+    }
+
+    public void saveJobStatus(JobModel jobModel) throws Exception {
+        try {
+            // first save the latest job status to the registry and then publish the status change event.
+            if (jobModel.getJobStatuses() == null || jobModel.getJobStatuses().isEmpty()) {
+                throw new Exception("Job model of job " + jobModel.getJobId() + " doesn't carry a job status to save");
+            }
+            JobStatus jobStatus = jobModel.getJobStatuses().get(0);
+
+            List<JobStatus> statuses = new ArrayList<>();
+            statuses.add(jobStatus);
+            jobModel.setJobStatuses(statuses);
+
+            // the persisted status is stamped with the time it is saved
+            jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+
+            CompositeIdentifier ids = new CompositeIdentifier(jobModel.getTaskId(), jobModel.getJobId());
+            getExperimentCatalog().add(ExpCatChildDataType.JOB_STATUS, jobStatus, ids);
+            JobIdentifier identifier = new JobIdentifier(jobModel.getJobId(), jobModel.getTaskId(),
+                    getProcessId(), getProcessModel().getExperimentId(), getGatewayId());
+
+            JobStatusChangeEvent jobStatusChangeEvent = new JobStatusChangeEvent(jobStatus.getJobState(), identifier);
+            MessageContext msgCtx = new MessageContext(jobStatusChangeEvent, MessageType.JOB, AiravataUtils.getId
+                    (MessageType.JOB.name()), getGatewayId());
+            msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
+            getStatusPublisher().publish(msgCtx);
+        } catch (Exception e) {
+            throw new Exception("Error persisting job status" + e.getLocalizedMessage(), e);
+        }
+    }
+
+    ///////////// required for groovy map
+
+    private String workingDir;
+    private String scratchLocation;
+    private UserComputeResourcePreference userComputeResourcePreference;
+
+    public String getWorkingDir() {
+        if (workingDir == null) {
+            if (getProcessModel().getProcessResourceSchedule().getStaticWorkingDir() != null){
+                workingDir = getProcessModel().getProcessResourceSchedule().getStaticWorkingDir();
+            }else {
+                String scratchLocation = getScratchLocation();
+                workingDir = (scratchLocation.endsWith("/") ? scratchLocation + getProcessId() : scratchLocation + "/" +
+                        getProcessId());
+            }
+        }
+        return workingDir;
+    }
+
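+    // Scratch location resolution order used below: user-level compute resource preference,
+    // then the per-process override, then the gateway-level compute resource preference.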
+    public String getScratchLocation() {
+        if (scratchLocation == null) {
+            if (isUseUserCRPref() &&
+                    userComputeResourcePreference != null &&
+                    isValid(userComputeResourcePreference.getScratchLocation())) {
+                scratchLocation = userComputeResourcePreference.getScratchLocation();
+            } else if (isValid(processModel.getProcessResourceSchedule().getOverrideScratchLocation())) {
+                scratchLocation = processModel.getProcessResourceSchedule().getOverrideScratchLocation();
+            }else {
+                scratchLocation = gatewayComputeResourcePreference.getScratchLocation();
+            }
+        }
+        return scratchLocation;
+    }
+
+    protected UserComputeResourcePreference userComputeResourcePreference() throws AppCatalogException {
+        if (userComputeResourcePreference == null) {
+            userComputeResourcePreference =
+                    getAppCatalog().getUserResourceProfile().getUserComputeResourcePreference(
+                            getProcessModel().getUserName(),
+                            getGatewayId(),
+                            getProcessModel().getComputeResourceId());
+        }
+        return userComputeResourcePreference;
+    }
+
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
new file mode 100644
index 0000000..5a3ca31
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
@@ -0,0 +1,81 @@
+package org.apache.airavata.helix.impl.task.submission.task;
+
+import org.apache.airavata.agents.api.AgentAdaptor;
+import org.apache.airavata.agents.api.JobSubmissionOutput;
+import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
+import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
+import org.apache.airavata.helix.task.api.TaskHelper;
+import org.apache.airavata.helix.task.api.annotation.TaskDef;
+import org.apache.airavata.model.job.JobModel;
+import org.apache.airavata.model.status.JobState;
+import org.apache.airavata.model.status.JobStatus;
+import org.apache.commons.io.FileUtils;
+import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.UUID;
+
+@TaskDef(name = "Local Job Submission")
+public class LocalJobSubmissionTask extends JobSubmissionTask {
+
+    private static final Logger logger = LogManager.getLogger(LocalJobSubmissionTask.class);
+
+    @Override
+    public TaskResult onRun(TaskHelper taskHelper) {
+
+        try {
+            GroovyMapData groovyMapData = new GroovyMapData();
+            String jobId = "JOB_ID_" + UUID.randomUUID().toString();
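+            // A local job has no resource manager to assign an id, so a synthetic UUID-based id is
+            // generated up front and stored with the job model.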
+
+            JobModel jobModel = new JobModel();
+            jobModel.setProcessId(getProcessId());
+            jobModel.setWorkingDir(groovyMapData.getWorkingDirectory());
+            jobModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
+            jobModel.setTaskId(getTaskId());
+            jobModel.setJobId(jobId);
+
+            File jobFile = SubmissionUtil.createJobFile(groovyMapData);
+
+            if (jobFile != null && jobFile.exists()) {
+                jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
+                saveJobModel(jobModel);
+
+                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(getComputeResourceId(),
+                        getJobSubmissionProtocol().name(), getComputeResourceCredentialToken());
+
+                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, jobFile, groovyMapData.getWorkingDirectory());
+
+                JobStatus jobStatus = new JobStatus();
+                jobStatus.setJobState(JobState.SUBMITTED);
+                jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
+                jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                jobModel.setJobStatuses(Arrays.asList(jobStatus));
+
+                saveJobStatus(jobModel);
+
+                jobModel.setExitCode(submissionOutput.getExitCode());
+                jobModel.setStdErr(submissionOutput.getStdErr());
+                jobModel.setStdOut(submissionOutput.getStdOut());
+
+                jobStatus.setJobState(JobState.COMPLETE);
+                jobStatus.setReason("Successfully Completed " + getComputeResourceDescription().getHostName());
+                jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                jobModel.setJobStatuses(Arrays.asList(jobStatus));
+
+                saveJobStatus(jobModel);
+
+                return null;
+            }
+
+            return null;
+        } catch (Exception e) {
+            logger.error("Local job submission task failed due to an unexpected issue", e);
+            return null;
+        }
+    }
+
+    @Override
+    public void onCancel() {
+
+    }
+}
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
new file mode 100644
index 0000000..51feff4
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
@@ -0,0 +1,31 @@
+package org.apache.airavata.helix.impl.workflow;
+
+import org.apache.airavata.helix.core.AbstractTask;
+import org.apache.airavata.helix.impl.task.EnvSetupTask;
+import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;
+import org.apache.airavata.helix.workflow.WorkflowManager;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+public class SimpleWorkflow {
+
+    public static void main(String[] args) throws Exception {
+
+        EnvSetupTask envSetupTask = new EnvSetupTask();
+        envSetupTask.setWorkingDirectory("/tmp/a");
+
+        DefaultJobSubmissionTask defaultJobSubmissionTask = new DefaultJobSubmissionTask();
+        defaultJobSubmissionTask.setGatewayId("default");
+        defaultJobSubmissionTask.setExperimentId("Clone_of_Mothur-Test1_0c9f627e-2c32-403e-a28a-2a8b10c21c1a");
+        defaultJobSubmissionTask.setProcessId("PROCESS_438a87cc-2dec-4edc-bfeb-31128df91bb6");
+        defaultJobSubmissionTask.setTaskId(UUID.randomUUID().toString());
+
+        List<AbstractTask> tasks = new ArrayList<>();
+        tasks.add(defaultJobSubmissionTask);
+
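+        // Launch against the Helix cluster configured in application.properties
+        // (cluster "AiravataDemoCluster", ZooKeeper at localhost:2199). Note that only the job
+        // submission task is wired into this sample workflow; the env setup task above is not added.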
+        WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster", "wm-22", "localhost:2199");
+        workflowManager.launchWorkflow(UUID.randomUUID().toString(), tasks, true);
+    }
+}
diff --git a/modules/helix-spectator/src/main/resources/airavata-server.properties b/modules/helix-spectator/src/main/resources/airavata-server.properties
new file mode 100644
index 0000000..5f47d79
--- /dev/null
+++ b/modules/helix-spectator/src/main/resources/airavata-server.properties
@@ -0,0 +1,334 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+###########################################################################
+#
+#  This properties file provides configuration for all Airavata Services:
+#  API Server, Registry, Workflow Interpreter, GFac, Orchestrator
+#
+###########################################################################
+
+###########################################################################
+#  API Server Registry Configuration
+###########################################################################
+
+#for derby [AiravataJPARegistry]
+#registry.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#registry.jdbc.url=jdbc:derby://localhost:1527/experiment_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+registry.jdbc.driver=org.mariadb.jdbc.Driver
+registry.jdbc.url=jdbc:mariadb://149.165.168.248:3306/experiment_catalog
+registry.jdbc.user=eroma
+registry.jdbc.password=eroma123456
+#FIXME: Probably the following property should be removed.
+start.derby.server.mode=false
+validationQuery=SELECT 1 from CONFIGURATION
+cache.enable=false
+jpa.cache.size=-1
+#jpa.connection.properties=MaxActive=10,MaxIdle=5,MinIdle=2,MaxWait=60000,testWhileIdle=true,testOnBorrow=true
+enable.sharing=true
+
+# Properties for default user mode
+default.registry.user=default-admin
+default.registry.password=123456
+default.registry.password.hash.method=SHA
+default.registry.gateway=default
+super.tenant.gatewayId=default
+
+# Properties for cluster status monitoring
+# cluster status monitoring job repeat time in seconds
+cluster.status.monitoring.enable=false
+cluster.status.monitoring.repeat.time=18000
+
+###########################################################################
+#  Application Catalog DB Configuration
+###########################################################################
+#for derby [AiravataJPARegistry]
+#appcatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#appcatalog.jdbc.url=jdbc:derby://localhost:1527/app_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+appcatalog.jdbc.driver=org.mariadb.jdbc.Driver
+appcatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/app_catalog
+appcatalog.jdbc.user=eroma
+appcatalog.jdbc.password=eroma123456
+appcatalog.validationQuery=SELECT 1 from CONFIGURATION
+
+##########################################################################
+#  Replica Catalog DB Configuration
+###########################################################################
+#for derby [AiravataJPARegistry]
+#replicacatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#replicacatalog.jdbc.url=jdbc:derby://localhost:1527/replica_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+replicacatalog.jdbc.driver=org.mariadb.jdbc.Driver
+replicacatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/replica_catalog
+replicacatalog.jdbc.user=eroma
+replicacatalog.jdbc.password=eroma123456
+replicacatalog.validationQuery=SELECT 1 from CONFIGURATION
+
+###########################################################################
+#  Workflow Catalog DB Configuration
+###########################################################################
+#for derby [AiravataJPARegistry]
+#workflowcatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#workflowcatalog.jdbc.url=jdbc:derby://localhost:1527/workflow_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+workflowcatalog.jdbc.driver=org.mariadb.jdbc.Driver
+workflowcatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/replica_catalog
+workflowcatalog.jdbc.user=eroma
+workflowcatalog.jdbc.password=eroma123456
+workflowcatalog.validationQuery=SELECT 1 from CONFIGURATION
+
+###########################################################################
+#  Sharing Catalog DB Configuration
+###########################################################################
+#for derby [AiravataJPARegistry]
+#sharingcatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#sharingcatalog.jdbc.url=jdbc:derby://localhost:1527/sharing_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+sharingcatalog.jdbc.driver=org.mariadb.jdbc.Driver
+sharingcatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/sharing_catalog
+sharingcatalog.jdbc.user=eroma
+sharingcatalog.jdbc.password=eroma123456
+sharingcatalog.validationQuery=SELECT 1 from CONFIGURATION
+
+###########################################################################
+#  Sharing Registry Server Configuration
+###########################################################################
+sharing_server=org.apache.airavata.sharing.registry.server.SharingRegistryServer
+sharing.registry.server.host=192.168.99.102
+sharing.registry.server.port=7878
+
+###########################################################################
+#  User Profile MongoDB Configuration
+###########################################################################
+userprofile.mongodb.host=localhost
+userprofile.mongodb.port=27017
+
+
+###########################################################################
+#  Server module Configuration
+###########################################################################
+#credential store server should be started before API server
+#This property is obsolete with the new script files.
+#servers=credentialstore,apiserver,orchestrator
+
+
+###########################################################################
+#  API Server Configurations
+###########################################################################
+apiserver=org.apache.airavata.api.server.AiravataAPIServer
+apiserver.name=apiserver-node0
+apiserver.host=192.168.99.102
+apiserver.port=8930
+apiserver.min.threads=50
+
+###########################################################################
+#  Orchestrator Server Configurations
+###########################################################################
+orchestrator=org.apache.airavata.orchestrator.server.OrchestratorServer
+orchestrator.server.name=orchestrator-node0
+orchestrator.server.host=192.168.99.102
+orchestrator.server.port=8940
+orchestrator.server.min.threads=50
+job.validators=org.apache.airavata.orchestrator.core.validator.impl.BatchQueueValidator,org.apache.airavata.orchestrator.core.validator.impl.ExperimentStatusValidator
+submitter.interval=10000
+threadpool.size=10
+start.submitter=true
+embedded.mode=true
+enable.validation=true
+
+###########################################################################
+#  Registry Server Configurations
+###########################################################################
+regserver=org.apache.airavata.registry.api.service.RegistryAPIServer
+regserver.server.name=regserver-node0
+regserver.server.host=192.168.99.102
+regserver.server.port=8970
+regserver.server.min.threads=50
+
+###########################################################################
+#  GFac Server Configurations
+###########################################################################
+gfac=org.apache.airavata.gfac.server.GfacServer
+gfac.server.name=gfac-node0
+gfac.server.host=10.0.2.15
+gfac.server.port=8950
+gfac.thread.pool.size=50
+host.scheduler=org.apache.airavata.gfac.impl.DefaultHostScheduler
+
+
+
+###########################################################################
+# Airavata Workflow Interpreter Configurations
+###########################################################################
+workflowserver=org.apache.airavata.api.server.WorkflowServer
+enactment.thread.pool.size=10
+
+#to define a custom workflow parser use the following property
+#workflow.parser=org.apache.airavata.workflow.core.parser.AiravataWorkflowBuilder
+
+
+
+###########################################################################
+#  Job Scheduler can send informative email messages to you about the status of your job.
+# Specify a string which consists of either the single character "n" (no mail), or one or more
+#  of the characters "a" (send mail when job is aborted), "b" (send mail when job begins),
+# and "e" (send mail when job terminates).  The default is "a" if not specified.
+###########################################################################
+
+job.notification.enable=true
+#Provide comma separated email ids as a string if more than one
+job.notification.emailids=
+job.notification.flags=abe
+
+###########################################################################
+# Credential Store module Configuration
+###########################################################################
+credential.store.keystore.url=/home/pga/master-deployment/keystores/cred_store.jks
+credential.store.keystore.alias=seckey
+credential.store.keystore.password=123456
+credential.store.jdbc.url=jdbc:mariadb://149.165.168.248:3306/credential_store
+credential.store.jdbc.user=eroma
+credential.store.jdbc.password=eroma123456
+credential.store.jdbc.driver=org.mariadb.jdbc.Driver
+credential.store.server.host=192.168.99.102
+credential.store.server.port=8960
+credentialstore=org.apache.airavata.credential.store.server.CredentialStoreServer
+credential.stroe.jdbc.validationQuery=SELECT 1 from CONFIGURATION
+
+# these properties are used by credential store email notifications
+email.server=smtp.googlemail.com
+email.server.port=465
+email.user=airavata
+email.password=xxx
+email.ssl=true
+email.from=airavata@apache.org
+
+# Either an SSH PKI key pair or an SSH password can be used for SSH based authentication.
+# If the user specifies both, password authentication gets the higher preference.
+
+################# ---------- For ssh key pair authentication ------------------- ################
+#ssh.public.key=/path to public key for ssh
+#ssh.private.key=/path to private key file for ssh
+#ssh.keypass=passphrase for the private key
+#ssh.username=username for ssh connection
+## If you set "yes" for ssh.strict.hostKey.checking, then you must provide known hosts file path
+#ssh.strict.hostKey.checking=yes/no
+#ssh.known.hosts.file=/path to known hosts file
+### In case of password authentication.
+#ssh.password=Password for ssh connection
+
+################ ---------- BES Properties ------------------- ###############
+#bes.ca.cert.path=<location>/certificates/cacert.pem
+#bes.ca.key.path=<location>/certificates/cakey.pem
+#bes.ca.key.pass=passphrase
+
+###########################################################################
+# Monitoring module Configuration
+###########################################################################
+
+#This is the primary monitoring tool that runs in Airavata. In the future there will be multiple monitoring
+#mechanisms, and it will be possible to choose which monitor to start.
+monitors=org.apache.airavata.gfac.monitor.impl.pull.qstat.QstatMonitor,org.apache.airavata.gfac.monitor.impl.LocalJobMonitor
+
+#These properties are used to enable email based monitoring
+email.based.monitor.host=imap.gmail.com
+email.based.monitor.address=ansibletestairavata@gmail.com
+email.based.monitor.password=ansibletestairavata123
+email.based.monitor.folder.name=INBOX
+# either imaps or pop3
+email.based.monitor.store.protocol=imaps
+#This property defines how often the email server is queried, in milliseconds (ms).
+email.based.monitoring.period=10000
+
+###########################################################################
+# AMQP Notification Configuration
+###########################################################################
+#for simple scenarios we can use the guest user
+#rabbitmq.broker.url=amqp://localhost:5672
+#for production scenarios, give url as amqp://userName:password@hostName:portNumber/virtualHost, create user, virtualhost
+# and give permissions, refer: http://blog.dtzq.com/2012/06/rabbitmq-users-and-virtual-hosts.html
+rabbitmq.broker.url=amqp://airavata:123456@192.168.99.102:5672/master
+rabbitmq.status.exchange.name=status_exchange
+rabbitmq.process.exchange.name=process_exchange
+rabbitmq.experiment.exchange.name=experiment_exchange
+durable.queue=false
+prefetch.count=200
+process.launch.queue.name=process.launch.queue
+experiment.launch..queue.name=experiment.launch.queue
+
+###########################################################################
+# Zookeeper Server Configuration
+###########################################################################
+embedded.zk=false
+zookeeper.server.connection=192.168.99.102:2181
+zookeeper.timeout=30000
+
+########################################################################
+## API Security Configuration
+########################################################################
+api.secured=false
+security.manager.class=org.apache.airavata.service.security.KeyCloakSecurityManager
+### TLS related configuration ####
+TLS.enabled=true
+TLS.api.server.port=9930
+TLS.client.timeout=10000
+#### keystore configuration ####
+keystore.path=/home/pga/master-deployment/keystores/airavata.jks
+keystore.password=password
+#### trust store configuration ####
+trust.store=/home/pga/master-deployment/keystores/client_truststore.jks
+trust.store.password=password
+#### authorization cache related configuration ####
+authz.cache.enabled=true
+authz.cache.manager.class=org.apache.airavata.service.security.authzcache.DefaultAuthzCacheManager
+in.memory.cache.size=1000
+
+# Kafka Logging related configuration
+isRunningOnAws=false
+kafka.broker.list=localhost:9092
+kafka.topic.prefix=local
+enable.kafka.logging=false
+
+###########################################################################
+# Profile Service Configuration
+###########################################################################
+profile.service.server.host=192.168.99.102
+profile.service.server.port=8962
+profile_service=org.apache.airavata.service.profile.server.ProfileServiceServer
+# MariaDB properties
+profile.service.jdbc.url=jdbc:mariadb://149.165.168.248:3306/profile_service
+profile.service.jdbc.user=eroma
+profile.service.jdbc.password=eroma123456
+profile.service.jdbc.driver=org.mariadb.jdbc.Driver
+profile.service.validationQuery=SELECT 1
+
+###########################################################################
+# Iam Admin services Configuration
+###########################################################################
+iam.server.url=https://192.168.99.102/auth
+iam.server.super.admin.username=admin
+iam.server.super.admin.password=123456
+
+###########################################################################
+# DB Event Manager Runner
+###########################################################################
+db_event_manager=org.apache.airavata.db.event.manager.DBEventManagerRunner
diff --git a/modules/helix-spectator/src/main/resources/application.properties b/modules/helix-spectator/src/main/resources/application.properties
new file mode 100644
index 0000000..41c5e5f
--- /dev/null
+++ b/modules/helix-spectator/src/main/resources/application.properties
@@ -0,0 +1,3 @@
+zookeeper.connection.url=localhost:2199
+helix.cluster.name=AiravataDemoCluster
+participant.name=all-p1
\ No newline at end of file
diff --git a/modules/helix-spectator/src/main/resources/log4j.properties b/modules/helix-spectator/src/main/resources/log4j.properties
new file mode 100644
index 0000000..e910f32
--- /dev/null
+++ b/modules/helix-spectator/src/main/resources/log4j.properties
@@ -0,0 +1,11 @@
+# Set root logger level to INFO and its only appender to A1.
+log4j.rootLogger=INFO, A1
+
+log4j.category.org.apache.helix=WARN
+log4j.category.org.apache.zookeeper=WARN
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 91202d7..bd99a2d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -696,6 +696,8 @@
                 <module>modules/distribution</module>
                 <!--<module>modules/test-suite</module>-->
                 <module>modules/compute-account-provisioning</module>
+                <module>modules/airavata-helix</module>
+                <module>modules/helix-spectator</module>
             </modules>
         </profile>
         <profile>

-- 
To stop receiving notification emails like this one, please contact
dimuthuupe@apache.org.

[airavata] 11/17: Thread safe entity manager factory

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit f51f1f16ff687768f7cfade2b92e66b84bd105d8
Author: dimuthu <di...@gmail.com>
AuthorDate: Mon Mar 5 23:08:52 2018 -0500

    Thread safe entity manager factory
---
 .../core/app/catalog/util/AppCatalogJPAUtils.java  | 61 ++++++++++++----------
 1 file changed, 33 insertions(+), 28 deletions(-)

diff --git a/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/util/AppCatalogJPAUtils.java b/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/util/AppCatalogJPAUtils.java
index 2efd419..4fd4685 100644
--- a/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/util/AppCatalogJPAUtils.java
+++ b/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/util/AppCatalogJPAUtils.java
@@ -45,37 +45,42 @@ public class AppCatalogJPAUtils {
     @PersistenceContext(unitName = "appcatalog_data")
     private static EntityManager appCatEntityManager;
 
+    private static final Object lock = new Object();
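+    // Guarding both lazy factory creation and the per-call EntityManager setup with a single lock
+    // keeps initialization thread safe, at the cost of serializing getEntityManager() calls.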
+
     public static EntityManager getEntityManager() throws ApplicationSettingsException {
-        if (factory == null) {
-            String connectionProperties = "DriverClassName=" + readServerProperties(APPCATALOG_JDBC_DRIVER) + "," +
-                    "Url=" + readServerProperties(APPCATALOG_JDBC_URL) + "?autoReconnect=true," +
-                    "Username=" + readServerProperties(APPCATALOG_JDBC_USER) + "," +
-                    "Password=" + readServerProperties(APPCATALOG_JDBC_PWD) +
-                    ",validationQuery=" + readServerProperties(APPCATALOG_VALIDATION_QUERY);
-            System.out.println(connectionProperties);
-            Map<String, String> properties = new HashMap<String, String>();
-            properties.put("openjpa.ConnectionDriverName", "org.apache.commons.dbcp.BasicDataSource");
-            properties.put("openjpa.ConnectionProperties", connectionProperties);
-            properties.put("openjpa.DynamicEnhancementAgent", "true");
-            properties.put("openjpa.RuntimeUnenhancedClasses", "unsupported");
-            // For app catalog, we don't need caching
+        synchronized (lock) {
+            if (factory == null) {
+                String connectionProperties = "DriverClassName=" + readServerProperties(APPCATALOG_JDBC_DRIVER) + "," +
+                        "Url=" + readServerProperties(APPCATALOG_JDBC_URL) + "?autoReconnect=true," +
+                        "Username=" + readServerProperties(APPCATALOG_JDBC_USER) + "," +
+                        "Password=" + readServerProperties(APPCATALOG_JDBC_PWD) +
+                        ",validationQuery=" + readServerProperties(APPCATALOG_VALIDATION_QUERY);
+                System.out.println(connectionProperties);
+                Map<String, String> properties = new HashMap<String, String>();
+                properties.put("openjpa.ConnectionDriverName", "org.apache.commons.dbcp.BasicDataSource");
+                properties.put("openjpa.ConnectionProperties", connectionProperties);
+                properties.put("openjpa.DynamicEnhancementAgent", "true");
+                properties.put("openjpa.RuntimeUnenhancedClasses", "unsupported");
+                //properties.put("openjpa.Multithreaded", "true");
+                // For app catalog, we don't need caching
 //            properties.put("openjpa.DataCache","" + readServerProperties(JPA_CACHE_ENABLED) + "(CacheSize=" + Integer.valueOf(readServerProperties(JPA_CACHE_SIZE)) + ", SoftReferenceSize=0)");
 //            properties.put("openjpa.QueryCache","" + readServerProperties(JPA_CACHE_ENABLED) + "(CacheSize=" + Integer.valueOf(readServerProperties(JPA_CACHE_SIZE)) + ", SoftReferenceSize=0)");
-            properties.put("openjpa.RemoteCommitProvider", "sjvm");
-            properties.put("openjpa.Log", "DefaultLevel=INFO, Runtime=INFO, Tool=INFO, SQL=INFO");
-            properties.put("openjpa.jdbc.SynchronizeMappings", "buildSchema(ForeignKeys=true)");
-            properties.put("openjpa.jdbc.QuerySQLCache", "false");
-            properties.put("openjpa.ConnectionFactoryProperties", "PrettyPrint=true, PrettyPrintLineLength=72, PrintParameters=true, MaxActive=10, MaxIdle=5, MinIdle=2, MaxWait=31536000,  autoReconnect=true");
-            factory = Persistence.createEntityManagerFactory(PERSISTENCE_UNIT_NAME, properties);
-        }
-        // clear cache at entitymangerfactory level
-        if (factory.getCache() != null) {
-            factory.getCache().evictAll();
-        }
-        appCatEntityManager = factory.createEntityManager();
-        // clear the entitymanager cache
-        if (appCatEntityManager != null) {
-            appCatEntityManager.clear();
+                properties.put("openjpa.RemoteCommitProvider", "sjvm");
+                properties.put("openjpa.Log", "DefaultLevel=INFO, Runtime=INFO, Tool=INFO, SQL=INFO");
+                properties.put("openjpa.jdbc.SynchronizeMappings", "buildSchema(ForeignKeys=true)");
+                properties.put("openjpa.jdbc.QuerySQLCache", "false");
+                properties.put("openjpa.ConnectionFactoryProperties", "PrettyPrint=true, PrettyPrintLineLength=72, PrintParameters=true, MaxActive=10, MaxIdle=5, MinIdle=2, MaxWait=31536000,  autoReconnect=true");
+                factory = Persistence.createEntityManagerFactory(PERSISTENCE_UNIT_NAME, properties);
+            }
+            // clear the cache at the EntityManagerFactory level
+            if (factory.getCache() != null) {
+                factory.getCache().evictAll();
+            }
+            appCatEntityManager = factory.createEntityManager();
+            // clear the entitymanager cache
+            if (appCatEntityManager != null) {
+                appCatEntityManager.clear();
+            }
         }
         return appCatEntityManager;
     }
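The hunk above boils down to a double-checked, lock-guarded singleton around the JPA EntityManagerFactory: the factory is created once under a shared lock, its factory-level cache is evicted, and a fresh EntityManager is handed out per call. A minimal sketch of that pattern; the class name and persistence-unit string below are illustrative, not the Airavata ones.

    import java.util.HashMap;
    import java.util.Map;
    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Persistence;

    public class CatalogEntityManagerProvider {

        private static final Object LOCK = new Object();
        private static EntityManagerFactory factory;

        public static EntityManager createEntityManager() {
            synchronized (LOCK) {
                if (factory == null) {
                    // Created once; later callers reuse the same factory instance.
                    Map<String, String> properties = new HashMap<>();
                    properties.put("openjpa.ConnectionDriverName", "org.apache.commons.dbcp.BasicDataSource");
                    factory = Persistence.createEntityManagerFactory("appcatalog_data", properties);
                }
                // Evict the factory-level (second level) cache so stale entities are not served.
                if (factory.getCache() != null) {
                    factory.getCache().evictAll();
                }
                EntityManager entityManager = factory.createEntityManager();
                entityManager.clear(); // start from an empty persistence context
                return entityManager;
            }
        }
    }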


[airavata] 17/17: Refactoring

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 71b294ed957cdddf6a08bf32f690c8204fedf3af
Author: dimuthu <di...@gmail.com>
AuthorDate: Wed Mar 7 16:09:50 2018 -0500

    Refactoring
---
 .../airavata/agents/api/JobSubmissionOutput.java   |   9 +
 .../helix/impl/controller/HelixController.java     |  54 +----
 .../helix/impl/participant/GlobalParticipant.java  |  68 +++---
 .../airavata/helix/impl/task/AiravataTask.java     |  69 +++---
 .../airavata/helix/impl/task/TaskContext.java      |  22 +-
 .../airavata/helix/impl/task/env/EnvSetupTask.java |  12 -
 .../helix/impl/task/staging/DataStagingTask.java   |   6 +
 .../impl/task/staging/InputDataStagingTask.java    |   4 -
 .../impl/task/staging/OutputDataStagingTask.java   |  37 +--
 .../task/submission/DefaultJobSubmissionTask.java  | 260 +++++++++------------
 .../task/submission/ForkJobSubmissionTask.java     |  78 ++++---
 .../impl/task/submission/JobSubmissionTask.java    |  38 ++-
 .../helix/impl/workflow/PostWorkflowManager.java   |  29 +--
 .../helix/impl/workflow/PreWorkflowManager.java    |   4 +-
 .../src/main/resources/airavata-server.properties  |  79 +------
 .../helix/core/participant/HelixParticipant.java   |  17 +-
 16 files changed, 314 insertions(+), 472 deletions(-)

diff --git a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/JobSubmissionOutput.java b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/JobSubmissionOutput.java
index 1858826..e1d0a80 100644
--- a/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/JobSubmissionOutput.java
+++ b/modules/airavata-helix/agent-api/src/main/java/org/apache/airavata/agents/api/JobSubmissionOutput.java
@@ -8,6 +8,7 @@ public class JobSubmissionOutput {
     private String jobId;
     private boolean isJobSubmissionFailed;
     private String failureReason;
+    private String description;
 
     public int getExitCode() {
         return exitCode;
@@ -71,4 +72,12 @@ public class JobSubmissionOutput {
         this.failureReason = failureReason;
         return this;
     }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
 }
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
index 11d7129..f5e2137 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
@@ -1,12 +1,11 @@
 package org.apache.airavata.helix.impl.controller;
 
-import org.apache.airavata.helix.core.util.PropertyResolver;
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.helix.controller.HelixControllerMain;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
-import java.io.File;
-import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
 
 /**
@@ -27,18 +26,11 @@ public class HelixController implements Runnable {
     private CountDownLatch startLatch = new CountDownLatch(1);
     private CountDownLatch stopLatch = new CountDownLatch(1);
 
-    public HelixController(String propertyFile, boolean readPropertyFromFile) throws IOException {
-
-        PropertyResolver propertyResolver = new PropertyResolver();
-        if (readPropertyFromFile) {
-            propertyResolver.loadFromFile(new File(propertyFile));
-        } else {
-            propertyResolver.loadInputStream(this.getClass().getClassLoader().getResourceAsStream(propertyFile));
-        }
-
-        this.clusterName = propertyResolver.get("helix.cluster.name");
-        this.controllerName = propertyResolver.get("helix.controller.name");
-        this.zkAddress = propertyResolver.get("zookeeper.connection.url");
+    @SuppressWarnings("WeakerAccess")
+    public HelixController() throws ApplicationSettingsException {
+        this.clusterName = ServerSettings.getSetting("helix.cluster.name");
+        this.controllerName = ServerSettings.getSetting("helix.controller.name");
+        this.zkAddress = ServerSettings.getZookeeperConnection();
     }
 
     public void run() {
@@ -64,12 +56,7 @@ public class HelixController implements Runnable {
             logger.info("Controller: " + controllerName + ", has connected to cluster: " + clusterName);
 
             Runtime.getRuntime().addShutdownHook(
-                    new Thread() {
-                        @Override
-                        public void run() {
-                            disconnect();
-                        }
-                    }
+                    new Thread(this::disconnect)
             );
 
         } catch (InterruptedException ex) {
@@ -77,6 +64,7 @@ public class HelixController implements Runnable {
         }
     }
 
+    @SuppressWarnings({"WeakerAccess", "unused"})
     public void stop() {
         stopLatch.countDown();
     }
@@ -92,29 +80,11 @@ public class HelixController implements Runnable {
         try {
 
             logger.info("Starting helix controller");
-            String confDir = null;
-            if (args != null) {
-                for (String arg : args) {
-                    if (arg.startsWith("--confDir=")) {
-                        confDir = arg.substring("--confDir=".length());
-                    }
-                }
-            }
-
-            String propertiesFile = "application.properties";
-            boolean readPropertyFromFile = false;
-
-            if (confDir != null && !confDir.isEmpty()) {
-                propertiesFile = confDir.endsWith(File.separator)? confDir + propertiesFile : confDir + File.separator + propertiesFile;
-                readPropertyFromFile = true;
-            }
-
-            logger.info("Using configuration file " + propertiesFile);
-
-            HelixController helixController = new HelixController(propertiesFile, readPropertyFromFile);
+
+            HelixController helixController = new HelixController();
             helixController.start();
 
-        } catch (IOException e) {
+        } catch (Exception e) {
             logger.error("Failed to start the helix controller", e);
         }
     }
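For orientation, the controller lifecycle after this change is a latch-based start/stop pair plus a shutdown hook registered through a method reference. A self-contained sketch under those assumptions; the class and method names are illustrative and the Helix connection calls are elided.

    import java.util.concurrent.CountDownLatch;

    public class LifecycleRunner implements Runnable {

        private final CountDownLatch startLatch = new CountDownLatch(1);
        private final CountDownLatch stopLatch = new CountDownLatch(1);

        @Override
        public void run() {
            try {
                // ... connect to the Helix cluster here ...
                startLatch.countDown();   // signal that startup has finished
                stopLatch.await();        // block until stop() is called
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            } finally {
                disconnect();
            }
        }

        public void start() throws InterruptedException {
            new Thread(this).start();
            startLatch.await();           // wait until run() reports it is up
            // cleanup registered through a method reference, as in the refactored controller
            Runtime.getRuntime().addShutdownHook(new Thread(this::disconnect));
        }

        public void stop() {
            stopLatch.countDown();
        }

        private void disconnect() {
            // ... release cluster resources ...
        }
    }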
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
index 7dd5c99..7c86f42 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
@@ -1,17 +1,14 @@
 package org.apache.airavata.helix.impl.participant;
 
+import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.helix.core.AbstractTask;
 import org.apache.airavata.helix.core.participant.HelixParticipant;
 import org.apache.airavata.helix.core.support.TaskHelperImpl;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
-import org.apache.helix.task.Task;
-import org.apache.helix.task.TaskCallbackContext;
 import org.apache.helix.task.TaskFactory;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
-import java.io.File;
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -30,26 +27,24 @@ public class GlobalParticipant extends HelixParticipant {
     };
 
     public Map<String, TaskFactory> getTaskFactory() {
-        Map<String, TaskFactory> taskRegistry = new HashMap<String, TaskFactory>();
+        Map<String, TaskFactory> taskRegistry = new HashMap<>();
 
         for (String taskClass : taskClasses) {
-            TaskFactory taskFac = new TaskFactory() {
-                public Task createNewTask(TaskCallbackContext context) {
-                    try {
-                        return AbstractTask.class.cast(Class.forName(taskClass).newInstance())
-                                .setCallbackContext(context)
-                                .setTaskHelper(new TaskHelperImpl());
-                    } catch (InstantiationException | IllegalAccessException e) {
-                        e.printStackTrace();
-                        return null;
-                    } catch (ClassNotFoundException e) {
-                        e.printStackTrace();
-                        return null;
-                    }
+            TaskFactory taskFac = context -> {
+                try {
+                    return AbstractTask.class.cast(Class.forName(taskClass).newInstance())
+                            .setCallbackContext(context)
+                            .setTaskHelper(new TaskHelperImpl());
+                } catch (InstantiationException | IllegalAccessException e) {
+                    logger.error("Failed to initialize the task", e);
+                    return null;
+                } catch (ClassNotFoundException e) {
+                    logger.error("Task class can not be found in the class path", e);
+                    return null;
                 }
             };
 
-            TaskDef taskDef = null;
+            TaskDef taskDef;
             try {
                 taskDef = Class.forName(taskClass).getAnnotation(TaskDef.class);
                 taskRegistry.put(taskDef.name(), taskFac);
@@ -60,34 +55,23 @@ public class GlobalParticipant extends HelixParticipant {
         return taskRegistry;
     }
 
-    public GlobalParticipant(String propertyFile, Class taskClass, String taskTypeName, boolean readPropertyFromFile) throws IOException {
-        super(propertyFile, taskClass, taskTypeName, readPropertyFromFile);
+    @SuppressWarnings("WeakerAccess")
+    public GlobalParticipant(Class taskClass, String taskTypeName) throws ApplicationSettingsException {
+        super(taskClass, taskTypeName);
     }
 
-    public static void main(String args[]) throws IOException {
+    public static void main(String args[]) {
+        logger.info("Starting global participant");
 
-        String confDir = null;
-        if (args != null) {
-            for (String arg : args) {
-                if (arg.startsWith("--confDir=")) {
-                    confDir = arg.substring("--confDir=".length());
-                }
-            }
-        }
-
-        String propertiesFile = "application.properties";
-        boolean readPropertyFromFile = false;
-
-        if (confDir != null && !confDir.isEmpty()) {
-            propertiesFile = confDir.endsWith(File.separator)? confDir + propertiesFile : confDir + File.separator + propertiesFile;
-            readPropertyFromFile = true;
+        GlobalParticipant participant;
+        try {
+            participant = new GlobalParticipant(null, null);
+            Thread t = new Thread(participant);
+            t.start();
+        } catch (Exception e) {
+            logger.error("Failed to start global participant", e);
         }
 
-        logger.info("Using configuration file " + propertiesFile);
-
-        GlobalParticipant participant = new GlobalParticipant(propertiesFile, null, null, readPropertyFromFile);
-        Thread t = new Thread(participant);
-        t.start();
     }
 
 }
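The getTaskFactory() rewrite above replaces an anonymous inner class with a lambda that reflectively instantiates the task class and returns null on failure. A dependency-free sketch of that shape; the Factory interface below merely stands in for Helix's TaskFactory and is not the real API.

    import java.util.HashMap;
    import java.util.Map;

    public class ReflectiveRegistry {

        // Stand-in for org.apache.helix.task.TaskFactory; illustrative only.
        interface Factory {
            Runnable create();
        }

        static Map<String, Factory> buildRegistry(String... taskClassNames) {
            Map<String, Factory> registry = new HashMap<>();
            for (String className : taskClassNames) {
                Factory factory = () -> {
                    try {
                        // Reflectively instantiate the task; callers receive null on failure,
                        // mirroring the behaviour of the lambda above.
                        return (Runnable) Class.forName(className).newInstance();
                    } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
                        // The real participant logs through log4j; stderr keeps this sketch dependency free.
                        e.printStackTrace();
                        return null;
                    }
                };
                registry.put(className, factory);
            }
            return registry;
        }
    }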
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
index 289cfc5..4f6d6ec 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
@@ -53,6 +53,7 @@ public abstract class AiravataTask extends AbstractTask {
     private OutPort nextTask;
 
     protected TaskResult onSuccess(String message) {
+        publishTaskState(TaskState.COMPLETED);
         String successMessage = "Task " + getTaskId() + " completed." + (message != null ? " Message : " + message : "");
         logger.info(successMessage);
         return nextTask.invoke(new TaskResult(TaskResult.Status.COMPLETED, message));
@@ -80,13 +81,15 @@ public abstract class AiravataTask extends AbstractTask {
         getTaskContext().setProcessStatus(status);
 
         ErrorModel errorModel = new ErrorModel();
-        errorModel.setUserFriendlyMessage("GFac Worker throws an exception");
+        errorModel.setUserFriendlyMessage(reason);
         errorModel.setActualErrorMessage(errors.toString());
         errorModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
 
+        publishTaskState(TaskState.FAILED);
         saveAndPublishProcessStatus();
         saveExperimentError(errorModel);
         saveProcessError(errorModel);
+        saveTaskError(errorModel);
         return new TaskResult(fatal ? TaskResult.Status.FATAL_FAILED : TaskResult.Status.FAILED, errorMessage);
     }
 
@@ -97,6 +100,7 @@ public abstract class AiravataTask extends AbstractTask {
         saveAndPublishProcessStatus();
     }
 
+    @SuppressWarnings("WeakerAccess")
     protected void saveAndPublishProcessStatus() {
         try {
             ProcessStatus status = taskContext.getProcessStatus();
@@ -117,6 +121,7 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     protected void saveAndPublishTaskStatus() {
         try {
             TaskState state = getTaskContext().getTaskState();
@@ -140,6 +145,7 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     protected void saveExperimentError(ErrorModel errorModel) {
         try {
             errorModel.setErrorId(AiravataUtils.getId("EXP_ERROR"));
@@ -150,6 +156,7 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     protected void saveProcessError(ErrorModel errorModel) {
         try {
             errorModel.setErrorId(AiravataUtils.getId("PROCESS_ERROR"));
@@ -161,14 +168,15 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
-    protected void saveTaskError(ErrorModel errorModel) throws Exception {
+    @SuppressWarnings("WeakerAccess")
+    protected void saveTaskError(ErrorModel errorModel) {
         try {
             errorModel.setErrorId(AiravataUtils.getId("TASK_ERROR"));
             getExperimentCatalog().add(ExpCatChildDataType.TASK_ERROR, errorModel, getTaskId());
         } catch (RegistryException e) {
             String msg = "expId: " + getExperimentId() + " processId: " + getProcessId() + " taskId: " + getTaskId()
                     + " : - Error while updating task errors";
-            throw new Exception(msg, e);
+            logger.error(msg, e);
         }
     }
 
@@ -191,6 +199,7 @@ public abstract class AiravataTask extends AbstractTask {
             MDC.put("process", getProcessId());
             MDC.put("gateway", getGatewayId());
             MDC.put("task", getTaskId());
+            publishTaskState(TaskState.EXECUTING);
             return onRun(helper, getTaskContext());
         } finally {
             MDC.clear();
@@ -206,6 +215,7 @@ public abstract class AiravataTask extends AbstractTask {
             MDC.put("process", getProcessId());
             MDC.put("gateway", getGatewayId());
             MDC.put("task", getTaskId());
+            publishTaskState(TaskState.CANCELED);
             onCancel(getTaskContext());
         } finally {
             MDC.clear();
@@ -231,22 +241,21 @@ public abstract class AiravataTask extends AbstractTask {
             this.computeResourceDescription = getAppCatalog().getComputeResource().getComputeResource(getProcessModel()
                     .getComputeResourceId());
 
-            TaskContext.TaskContextBuilder taskContextBuilder = new TaskContext.TaskContextBuilder(getProcessId(), getGatewayId(), getTaskId());
-            taskContextBuilder.setAppCatalog(getAppCatalog());
-            taskContextBuilder.setExperimentCatalog(getExperimentCatalog());
-            taskContextBuilder.setProcessModel(getProcessModel());
-            taskContextBuilder.setStatusPublisher(getStatusPublisher());
-
-            taskContextBuilder.setGatewayResourceProfile(appCatalog.getGatewayProfile().getGatewayProfile(gatewayId));
-            taskContextBuilder.setGatewayComputeResourcePreference(
+            TaskContext.TaskContextBuilder taskContextBuilder = new TaskContext.TaskContextBuilder(getProcessId(), getGatewayId(), getTaskId())
+                    .setAppCatalog(getAppCatalog())
+                    .setExperimentCatalog(getExperimentCatalog())
+                    .setProcessModel(getProcessModel())
+                    .setStatusPublisher(getStatusPublisher())
+                    .setGatewayResourceProfile(appCatalog.getGatewayProfile().getGatewayProfile(gatewayId))
+                    .setGatewayComputeResourcePreference(
                             appCatalog.getGatewayProfile()
-                                    .getComputeResourcePreference(gatewayId, processModel.getComputeResourceId()));
-            taskContextBuilder.setGatewayStorageResourcePreference(
+                                    .getComputeResourcePreference(gatewayId, processModel.getComputeResourceId()))
+                    .setGatewayStorageResourcePreference(
                             appCatalog.getGatewayProfile()
                                     .getStoragePreference(gatewayId, processModel.getStorageResourceId()));
 
             this.taskContext = taskContextBuilder.build();
-            logger.info("Task " + taskName + " intitialized");
+            logger.info("Task " + taskName + " initialized");
         } catch (Exception e) {
             logger.error("Error occurred while initializing the task " + getTaskId() + " of experiment " + getExperimentId(), e);
            throw new RuntimeException("Error occurred while initializing the task " + getTaskId() + " of experiment " + getExperimentId(), e);
@@ -259,19 +268,25 @@ public abstract class AiravataTask extends AbstractTask {
         return appCatalog;
     }
 
-    protected void publishTaskState(TaskState ts) throws RegistryException {
-
-        TaskStatus taskStatus = new TaskStatus();
-        taskStatus.setState(ts);
-        taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-        experimentCatalog.add(ExpCatChildDataType.TASK_STATUS, taskStatus, getTaskId());
-        TaskIdentifier identifier = new TaskIdentifier(getTaskId(),
-                getProcessId(), getExperimentId(), getGatewayId());
-        TaskStatusChangeEvent taskStatusChangeEvent = new TaskStatusChangeEvent(ts,
-                identifier);
-        MessageContext msgCtx = new MessageContext(taskStatusChangeEvent, MessageType.TASK, AiravataUtils.getId
-                (MessageType.TASK.name()), getGatewayId());
-        msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
+    @SuppressWarnings("WeakerAccess")
+    protected void publishTaskState(TaskState ts) {
+
+        try {
+            TaskStatus taskStatus = new TaskStatus();
+            taskStatus.setState(ts);
+            taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+            experimentCatalog.add(ExpCatChildDataType.TASK_STATUS, taskStatus, getTaskId());
+            TaskIdentifier identifier = new TaskIdentifier(getTaskId(),
+                    getProcessId(), getExperimentId(), getGatewayId());
+            TaskStatusChangeEvent taskStatusChangeEvent = new TaskStatusChangeEvent(ts,
+                    identifier);
+            MessageContext msgCtx = new MessageContext(taskStatusChangeEvent, MessageType.TASK, AiravataUtils.getId
+                    (MessageType.TASK.name()), getGatewayId());
+            msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
+            statusPublisher.publish(msgCtx);
+        } catch (Exception e) {
+            logger.error("Failed to publish task status " + (ts != null ? ts.name(): "null") +" of task " + getTaskId());
+        }
     }
 
     protected ComputeResourceDescription getComputeResourceDescription() {
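The TaskContextBuilder change above is the usual move from one setter call per statement to fluent chaining, where each setter returns the builder itself. A tiny, self-contained illustration with made-up field names:

    public class ContextBuilder {

        private String processId;
        private String gatewayId;

        // Each setter returns this, which is what enables the chained style above.
        public ContextBuilder setProcessId(String processId) {
            this.processId = processId;
            return this;
        }

        public ContextBuilder setGatewayId(String gatewayId) {
            this.gatewayId = gatewayId;
            return this;
        }

        public String build() {
            return "Context[" + processId + ", " + gatewayId + "]";
        }

        public static void main(String[] args) {
            String context = new ContextBuilder()
                    .setProcessId("PROC_1")
                    .setGatewayId("example-gateway")
                    .build();
            System.out.println(context);
        }
    }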
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
index 6be1d36..0e6a3cc 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
@@ -716,21 +716,11 @@ public class TaskContext {
                             }
                         }
                     }else {
-                        Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
-                            @Override
-                            public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
-                                return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
-                            }
-                        });
+                        jobSubmissionInterfaces.sort(Comparator.comparingInt(JobSubmissionInterface::getPriorityOrder));
                     }
                 }
                 interfaces = orderedInterfaces.get(preferredJobSubmissionProtocol);
-                Collections.sort(interfaces, new Comparator<JobSubmissionInterface>() {
-                    @Override
-                    public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
-                        return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
-                    }
-                });
+                interfaces.sort(Comparator.comparingInt(JobSubmissionInterface::getPriorityOrder));
             } else {
                 throw new AppCatalogException("Compute resource should have at least one job submission interface defined...");
             }
@@ -740,6 +730,7 @@ public class TaskContext {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     public TaskModel getCurrentTaskModel() {
         return getTaskMap().get(taskId);
     }
@@ -763,6 +754,7 @@ public class TaskContext {
         private StoragePreference gatewayStorageResourcePreference;
         private ProcessModel processModel;
 
+        @SuppressWarnings("WeakerAccess")
         public TaskContextBuilder(String processId, String gatewayId, String taskId) throws Exception {
             if (notValid(processId) || notValid(gatewayId) || notValid(taskId)) {
                 throwError("Process Id, Gateway Id and Task Id must be not null");
@@ -826,9 +818,9 @@ public class TaskContext {
             if (notValid(experimentCatalog)) {
                 throwError("Invalid Experiment catalog");
             }
-            //if (notValid(statusPublisher)) {
-              //  throwError("Invalid Status Publisher");
-            //}
+            if (notValid(statusPublisher)) {
+                throwError("Invalid Status Publisher");
+            }
 
             TaskContext ctx = new TaskContext(processId, gatewayId, taskId);
             ctx.setAppCatalog(appCatalog);
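The comparator change above is the standard Java 8 simplification: Collections.sort with an anonymous Comparator becomes List.sort with a key-extracting comparator. A runnable illustration, where PriorityItem is a made-up stand-in for JobSubmissionInterface:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    public class SortByPriority {

        static class PriorityItem {
            final String name;
            final int priorityOrder;

            PriorityItem(String name, int priorityOrder) {
                this.name = name;
                this.priorityOrder = priorityOrder;
            }

            int getPriorityOrder() {
                return priorityOrder;
            }
        }

        public static void main(String[] args) {
            List<PriorityItem> interfaces = new ArrayList<>();
            interfaces.add(new PriorityItem("SSH", 2));
            interfaces.add(new PriorityItem("LOCAL", 1));

            // Equivalent to the removed anonymous Comparator that subtracted priority orders,
            // but shorter and free of integer-overflow surprises.
            interfaces.sort(Comparator.comparingInt(PriorityItem::getPriorityOrder));

            interfaces.forEach(item -> System.out.println(item.name));
        }
    }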
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
index 6eb1722..84adbcd 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
@@ -6,8 +6,6 @@ import org.apache.airavata.helix.impl.task.TaskContext;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.airavata.model.status.ProcessState;
-import org.apache.airavata.model.status.TaskState;
-import org.apache.airavata.registry.cpi.RegistryException;
 import org.apache.helix.task.TaskResult;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -20,9 +18,7 @@ public class EnvSetupTask extends AiravataTask {
     @Override
     public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
         try {
-
             saveAndPublishProcessStatus(ProcessState.CONFIGURING_WORKSPACE);
-            publishTaskState(TaskState.EXECUTING);
             AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
                     getTaskContext().getGatewayId(),
                     getTaskContext().getComputeResourceId(),
@@ -32,17 +28,9 @@ public class EnvSetupTask extends AiravataTask {
 
             logger.info("Creating directory " + getTaskContext().getWorkingDir() + " on compute resource " + getTaskContext().getComputeResourceId());
             adaptor.createDirectory(getTaskContext().getWorkingDir());
-            publishTaskState(TaskState.COMPLETED);
             return onSuccess("Envi setup task successfully completed " + getTaskId());
 
         } catch (Exception e) {
-            try {
-                publishTaskState(TaskState.FAILED);
-            } catch (RegistryException e1) {
-                logger.error("Task failed to publish task status", e1);
-
-                // ignore silently
-            }
             return onFail("Failed to setup environment of task " + getTaskId(), true, e);
         }
     }
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
index 76b4cb3..3220064 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
@@ -15,8 +15,10 @@ import org.apache.commons.io.FileUtils;
 import java.io.File;
 import java.io.IOException;
 
+@SuppressWarnings("WeakerAccess")
 public abstract class DataStagingTask extends AiravataTask {
 
+    @SuppressWarnings("WeakerAccess")
     protected DataStagingTaskModel getDataStagingTaskModel() throws TaskOnFailException {
         try {
             Object subTaskModel = getTaskContext().getSubTaskModel();
@@ -30,6 +32,7 @@ public abstract class DataStagingTask extends AiravataTask {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     protected StorageResourceDescription getStorageResource() throws TaskOnFailException {
         try {
             StorageResourceDescription storageResource = getTaskContext().getStorageResource();
@@ -42,6 +45,7 @@ public abstract class DataStagingTask extends AiravataTask {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     protected StorageResourceAdaptor getStorageAdaptor(AdaptorSupport adaptorSupport) throws TaskOnFailException {
         try {
             StorageResourceAdaptor storageResourceAdaptor = adaptorSupport.fetchStorageAdaptor(
@@ -61,6 +65,7 @@ public abstract class DataStagingTask extends AiravataTask {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     protected AgentAdaptor getComputeResourceAdaptor(AdaptorSupport adaptorSupport) throws TaskOnFailException {
         try {
             return adaptorSupport.fetchAdaptor(
@@ -75,6 +80,7 @@ public abstract class DataStagingTask extends AiravataTask {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     protected String getLocalDataPath(String fileName) throws TaskOnFailException {
         String localDataPath = ServerSettings.getLocalDataLocation();
         localDataPath = (localDataPath.endsWith(File.separator) ? localDataPath : localDataPath + File.separator);
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
index de2aeac..f8d98cf 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
@@ -7,7 +7,6 @@ import org.apache.airavata.helix.impl.task.TaskContext;
 import org.apache.airavata.helix.impl.task.TaskOnFailException;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
-import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
 import org.apache.airavata.model.application.io.InputDataObjectType;
 import org.apache.airavata.model.status.ProcessState;
 import org.apache.airavata.model.task.DataStagingTaskModel;
@@ -49,9 +48,6 @@ public class InputDataStagingTask extends DataStagingTask {
                 throw new TaskOnFailException(message, true, null);
             }
 
-            // Fetch and validate storage resource
-            StorageResourceDescription storageResource = getStorageResource();
-
             // Fetch and validate source and destination URLS
             URI sourceURI;
             URI destinationURI;
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
index 7d657cb..88698c0 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
@@ -20,7 +20,7 @@ import org.apache.log4j.Logger;
 import java.io.File;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 @TaskDef(name = "Output Data Staging Task")
@@ -65,6 +65,7 @@ public class OutputDataStagingTask extends DataStagingTask {
                         sourceURI.getPath().length());
 
                 if (dataStagingTaskModel.getDestination().startsWith("dummy")) {
+
                     String inputPath  = getTaskContext().getStorageFileSystemRootLocation();
                     inputPath = (inputPath.endsWith(File.separator) ? inputPath : inputPath + File.separator);
                     String experimentDataDir = getProcessModel().getExperimentDataDir();
@@ -110,7 +111,7 @@ public class OutputDataStagingTask extends DataStagingTask {
                 String sourceParentPath = (new File(sourceURI.getPath())).getParentFile().getPath();
 
                 logger.debug("Destination parent path " + destParentPath + ", source parent path " + sourceParentPath);
-                List<String> fileNames = null;
+                List<String> fileNames;
                 try {
                     fileNames = adaptor.getFileNameFromExtension(sourceFileName, sourceParentPath);
 
@@ -133,11 +134,14 @@ public class OutputDataStagingTask extends DataStagingTask {
                     }
 
                     //Wildcard support is only enabled for output data staging
+                    assert processOutput != null;
                     processOutput.setName(sourceFileName);
 
                     try {
-                        getTaskContext().getExperimentCatalog().add(ExpCatChildDataType.EXPERIMENT_OUTPUT, Arrays.asList(processOutput), getExperimentId());
-                        getTaskContext().getExperimentCatalog().add(ExpCatChildDataType.PROCESS_OUTPUT, Arrays.asList(processOutput), getProcessId());
+                        getTaskContext().getExperimentCatalog().add(ExpCatChildDataType.EXPERIMENT_OUTPUT,
+                                Collections.singletonList(processOutput), getExperimentId());
+                        getTaskContext().getExperimentCatalog().add(ExpCatChildDataType.PROCESS_OUTPUT,
+                                Collections.singletonList(processOutput), getProcessId());
                     } catch (RegistryException e) {
                         throw new TaskOnFailException("Failed to update experiment or process outputs for task " + getTaskId(), true, e);
                     }
@@ -145,11 +149,12 @@ public class OutputDataStagingTask extends DataStagingTask {
                     logger.info("Transferring file " + sourceFileName);
                     transferFile(sourceURI, destinationURI, sourceFileName, adaptor, storageResourceAdaptor);
                 }
+                return onSuccess("Output data staging task " + getTaskId() + " successfully completed");
 
             } else {
                 // Downloading input file from the storage resource
                 transferFile(sourceURI, destinationURI, sourceFileName, adaptor, storageResourceAdaptor);
-                return onSuccess("Input data staging task " + getTaskId() + " successfully completed");
+                return onSuccess("Output data staging task " + getTaskId() + " successfully completed");
             }
 
         } catch (TaskOnFailException e) {
@@ -164,8 +169,6 @@ public class OutputDataStagingTask extends DataStagingTask {
             logger.error("Unknown error while executing output data staging task " + getTaskId(), e);
             return onFail("Unknown error while executing output data staging task " + getTaskId(), false,  e);
         }
-
-        return null;
     }
 
     private void transferFile(URI sourceURI, URI destinationURI, String fileName, AgentAdaptor adaptor,
@@ -192,26 +195,6 @@ public class OutputDataStagingTask extends DataStagingTask {
         }
     }
 
-    public URI getDestinationURIFromDummy(String hostName, String inputPath, String fileName) throws URISyntaxException {
-        String experimentDataDir = getProcessModel().getExperimentDataDir();
-        String filePath;
-        if(experimentDataDir != null && !experimentDataDir.isEmpty()) {
-            if(!experimentDataDir.endsWith(File.separator)){
-                experimentDataDir += File.separator;
-            }
-            if (experimentDataDir.startsWith(File.separator)) {
-                filePath = experimentDataDir + fileName;
-            } else {
-                filePath = inputPath + experimentDataDir + fileName;
-            }
-        } else {
-            filePath = inputPath + getProcessId() + File.separator + fileName;
-        }
-        //FIXME
-        return new URI("file", getTaskContext().getStorageResourceLoginUserName(), hostName, 22, filePath, null, null);
-
-    }
-
     @Override
     public void onCancel(TaskContext taskContext) {
 
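The Arrays.asList to Collections.singletonList swaps in this commit are a small readability and intent improvement for one-element lists; singletonList is immutable and allocates less. For example:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class SingleElementLists {

        public static void main(String[] args) {
            List<String> asList = Arrays.asList("output.dat");                // fixed-size, array backed
            List<String> singleton = Collections.singletonList("output.dat"); // immutable, one shared element

            System.out.println(asList.equals(singleton)); // true: same contents, different implementations
        }
    }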
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
index 82316f0..6d64273 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
@@ -19,7 +19,7 @@ import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
 import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 @TaskDef(name = "Default Job Submission")
@@ -27,7 +27,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
 
     private static final Logger logger = LogManager.getLogger(DefaultJobSubmissionTask.class);
 
-    public static final String DEFAULT_JOB_ID = "DEFAULT_JOB_ID";
+    private static final String DEFAULT_JOB_ID = "DEFAULT_JOB_ID";
 
     @Override
     public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
@@ -45,171 +45,137 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
             jobModel.setJobName(mapData.getJobName());
             jobModel.setJobDescription("Sample description");
 
-            if (mapData != null) {
-                //jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
-                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
-                        getTaskContext().getGatewayId(),
-                        getTaskContext().getComputeResourceId(),
-                        getTaskContext().getJobSubmissionProtocol().name(),
-                        getTaskContext().getComputeResourceCredentialToken(),
-                        getTaskContext().getComputeResourceLoginUserName());
-
-                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, mapData.getWorkingDirectory());
-
-                jobModel.setExitCode(submissionOutput.getExitCode());
-                jobModel.setStdErr(submissionOutput.getStdErr());
-                jobModel.setStdOut(submissionOutput.getStdOut());
-
-                String jobId = submissionOutput.getJobId();
-
-                if (submissionOutput.getExitCode() != 0 || submissionOutput.isJobSubmissionFailed()) {
-                    jobModel.setJobId(DEFAULT_JOB_ID);
-                    if (submissionOutput.isJobSubmissionFailed()) {
-                        List<JobStatus> statusList = new ArrayList<>();
-                        statusList.add(new JobStatus(JobState.FAILED));
-                        statusList.get(0).setReason(submissionOutput.getFailureReason());
-                        jobModel.setJobStatuses(statusList);
-                        saveJobModel(jobModel);
-                        logger.error("expId: " + getExperimentId() + ", processid: " + getProcessId()+ ", taskId: " +
-                                getTaskId() + " :- Job submission failed for job name " + jobModel.getJobName()
-                                + ". Exit code : " + submissionOutput.getExitCode() + ", Submission failed : "
-                                + submissionOutput.isJobSubmissionFailed());
+            AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                    getTaskContext().getGatewayId(),
+                    getTaskContext().getComputeResourceId(),
+                    getTaskContext().getJobSubmissionProtocol().name(),
+                    getTaskContext().getComputeResourceCredentialToken(),
+                    getTaskContext().getComputeResourceLoginUserName());
 
-                        ErrorModel errorModel = new ErrorModel();
-                        errorModel.setUserFriendlyMessage(submissionOutput.getFailureReason());
-                        errorModel.setActualErrorMessage(submissionOutput.getFailureReason());
-                        saveExperimentError(errorModel);
-                        saveProcessError(errorModel);
-                        saveTaskError(errorModel);
-                        //taskStatus.setState(TaskState.FAILED);
-                        //taskStatus.setReason("Job submission command didn't return a jobId");
-                        //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-                        //taskContext.setTaskStatus(taskStatus);
-                        logger.error("Standard error message : " + submissionOutput.getStdErr());
-                        logger.error("Standard out message : " + submissionOutput.getStdOut());
-                        return onFail("Job submission command didn't return a jobId", false, null);
+            JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, mapData.getWorkingDirectory());
+
+            jobModel.setJobDescription(submissionOutput.getDescription());
+            jobModel.setExitCode(submissionOutput.getExitCode());
+            jobModel.setStdErr(submissionOutput.getStdErr());
+            jobModel.setStdOut(submissionOutput.getStdOut());
+
+            String jobId = submissionOutput.getJobId();
+
+            if (submissionOutput.getExitCode() != 0 || submissionOutput.isJobSubmissionFailed()) {
+
+                jobModel.setJobId(DEFAULT_JOB_ID);
+                if (submissionOutput.isJobSubmissionFailed()) {
+                    List<JobStatus> statusList = new ArrayList<>();
+                    statusList.add(new JobStatus(JobState.FAILED));
+                    statusList.get(0).setReason(submissionOutput.getFailureReason());
+                    jobModel.setJobStatuses(statusList);
+                    saveJobModel(jobModel);
+                    logger.error("expId: " + getExperimentId() + ", processid: " + getProcessId()+ ", taskId: " +
+                            getTaskId() + " :- Job submission failed for job name " + jobModel.getJobName()
+                            + ". Exit code : " + submissionOutput.getExitCode() + ", Submission failed : "
+                            + submissionOutput.isJobSubmissionFailed());
+
+                    logger.error("Standard error message : " + submissionOutput.getStdErr());
+                    logger.error("Standard out message : " + submissionOutput.getStdOut());
+                    return onFail("Job submission command didn't return a jobId. Reason " + submissionOutput.getFailureReason(),
+                            false, null);
+
+                } else {
 
+                    String msg;
+                    saveJobModel(jobModel);
+                    ErrorModel errorModel = new ErrorModel();
+                    if (submissionOutput.getExitCode() != Integer.MIN_VALUE) {
+                        msg = "expId:" + getExperimentId() + ", processId:" + getProcessId() + ", taskId: " + getTaskId() +
+                                " return non zero exit code:" + submissionOutput.getExitCode() + "  for JobName:" + jobModel.getJobName() +
+                                ", with failure reason : " + submissionOutput.getFailureReason()
+                                + " Hence changing job state to Failed." ;
+                        errorModel.setActualErrorMessage(submissionOutput.getFailureReason());
                     } else {
-                        String msg;
-                        saveJobModel(jobModel);
-                        ErrorModel errorModel = new ErrorModel();
-                        if (submissionOutput.getExitCode() != Integer.MIN_VALUE) {
-                            msg = "expId:" + getExperimentId() + ", processId:" + getProcessId() + ", taskId: " + getTaskId() +
-                                    " return non zero exit code:" + submissionOutput.getExitCode() + "  for JobName:" + jobModel.getJobName() +
-                                    ", with failure reason : " + submissionOutput.getFailureReason()
-                                    + " Hence changing job state to Failed." ;
-                            errorModel.setActualErrorMessage(submissionOutput.getFailureReason());
-                        } else {
-                            msg = "expId:" + getExperimentId() + ", processId:" + getProcessId() + ", taskId: " + getTaskId() +
-                                    " doesn't  return valid job submission exit code for JobName:" + jobModel.getJobName() +
-                                    ", with failure reason : stdout ->" + submissionOutput.getStdOut() +
-                                    " stderr -> " + submissionOutput.getStdErr() + " Hence changing job state to Failed." ;
-                            errorModel.setActualErrorMessage(msg);
-                        }
-                        logger.error(msg);
-                        errorModel.setUserFriendlyMessage(msg);
-                        saveExperimentError(errorModel);
-                        saveProcessError(errorModel);
-                        saveTaskError(errorModel);
-                        //taskStatus.setState(TaskState.FAILED);
-                        //taskStatus.setReason(msg);
-                        //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-                        //taskContext.setTaskStatus(taskStatus);
-                        return onFail(msg, false, null);
+                        msg = "expId:" + getExperimentId() + ", processId:" + getProcessId() + ", taskId: " + getTaskId() +
+                                " doesn't  return valid job submission exit code for JobName:" + jobModel.getJobName() +
+                                ", with failure reason : stdout ->" + submissionOutput.getStdOut() +
+                                " stderr -> " + submissionOutput.getStdErr() + " Hence changing job state to Failed." ;
+                        errorModel.setActualErrorMessage(msg);
                     }
+                    logger.error(msg);
+                    return onFail(msg, false, null);
 
-                    //TODO save task status??
-                } else if (jobId != null && !jobId.isEmpty()) {
-                    logger.info("Received job id " + jobId + " from compute resource");
-                    jobModel.setJobId(jobId);
-                    saveJobModel(jobModel);
+                }
+
+            } else if (jobId != null && !jobId.isEmpty()) {
+
+                logger.info("Received job id " + jobId + " from compute resource");
+                jobModel.setJobId(jobId);
+                saveJobModel(jobModel);
+
+                JobStatus jobStatus = new JobStatus();
+                jobStatus.setJobState(JobState.SUBMITTED);
+                jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
+                jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                jobModel.setJobStatuses(Collections.singletonList(jobStatus));
+                saveAndPublishJobStatus(jobModel);
 
-                    JobStatus jobStatus = new JobStatus();
-                    jobStatus.setJobState(JobState.SUBMITTED);
-                    jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
+                if (verifyJobSubmissionByJobId(adaptor, jobId)) {
+                    jobStatus.setJobState(JobState.QUEUED);
+                    jobStatus.setReason("Verification step succeeded");
                     jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-                    jobModel.setJobStatuses(Arrays.asList(jobStatus));
+                    jobModel.setJobStatuses(Collections.singletonList(jobStatus));
                     saveAndPublishJobStatus(jobModel);
+                    createMonitoringNode(jobId);
+                }
+
+                if (getComputeResourceDescription().isGatewayUsageReporting()){
+                    String loadCommand = getComputeResourceDescription().getGatewayUsageModuleLoadCommand();
+                    String usageExecutable = getComputeResourceDescription().getGatewayUsageExecutable();
+                    ExperimentModel experiment = (ExperimentModel)getExperimentCatalog().get(ExperimentCatalogModelType.EXPERIMENT, getExperimentId());
+                    String username = experiment.getUserName() + "@" + getTaskContext().getGatewayComputeResourcePreference().getUsageReportingGatewayId();
+                    RawCommandInfo rawCommandInfo = new RawCommandInfo(loadCommand + " && " + usageExecutable + " -gateway_user " +  username  +
+                            " -submit_time \"`date '+%F %T %:z'`\"  -jobid " + jobId );
+                    adaptor.executeCommand(rawCommandInfo.getRawCommand(), null);
+                }
+
+                return onSuccess("Submitted job to compute resource");
+
+            } else {
 
-                    if (verifyJobSubmissionByJobId(adaptor, jobId)) {
+                int verificationTryCount = 0;
+                while (verificationTryCount++ < 3) {
+                    String verifyJobId = verifyJobSubmission(adaptor, jobModel.getJobName(), getTaskContext().getComputeResourceLoginUserName());
+                    if (verifyJobId != null && !verifyJobId.isEmpty()) {
+                        // JobStatus either changed from SUBMITTED to QUEUED or directly to QUEUED
+                        jobId = verifyJobId;
+                        jobModel.setJobId(jobId);
+                        saveJobModel(jobModel);
+                        JobStatus jobStatus = new JobStatus();
                         jobStatus.setJobState(JobState.QUEUED);
                         jobStatus.setReason("Verification step succeeded");
                         jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-                        jobModel.setJobStatuses(Arrays.asList(jobStatus));
+                        jobModel.setJobStatuses(Collections.singletonList(jobStatus));
                         saveAndPublishJobStatus(jobModel);
-                        createMonitoringNode(jobId);
-                    }
-
-                    if (getComputeResourceDescription().isGatewayUsageReporting()){
-                        String loadCommand = getComputeResourceDescription().getGatewayUsageModuleLoadCommand();
-                        String usageExecutable = getComputeResourceDescription().getGatewayUsageExecutable();
-                        ExperimentModel experiment = (ExperimentModel)getExperimentCatalog().get(ExperimentCatalogModelType.EXPERIMENT, getExperimentId());
-                        String username = experiment.getUserName() + "@" + getTaskContext().getGatewayComputeResourcePreference().getUsageReportingGatewayId();
-                        RawCommandInfo rawCommandInfo = new RawCommandInfo(loadCommand + " && " + usageExecutable + " -gateway_user " +  username  +
-                                " -submit_time \"`date '+%F %T %:z'`\"  -jobid " + jobId );
-                        adaptor.executeCommand(rawCommandInfo.getRawCommand(), null);
-                    }
-                    //taskStatus = new TaskStatus(TaskState.COMPLETED);
-                    //taskStatus.setReason("Submitted job to compute resource");
-                    //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-
-                    return onSuccess("Submitted job to compute resource");
-                } else {
-                    int verificationTryCount = 0;
-                    while (verificationTryCount++ < 3) {
-                        String verifyJobId = verifyJobSubmission(adaptor, jobModel.getJobName(), getTaskContext().getComputeResourceLoginUserName());
-                        if (verifyJobId != null && !verifyJobId.isEmpty()) {
-                            // JobStatus either changed from SUBMITTED to QUEUED or directly to QUEUED
-                            jobId = verifyJobId;
-                            jobModel.setJobId(jobId);
-                            saveJobModel(jobModel);
-                            JobStatus jobStatus = new JobStatus();
-                            jobStatus.setJobState(JobState.QUEUED);
-                            jobStatus.setReason("Verification step succeeded");
-                            jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-                            jobModel.setJobStatuses(Arrays.asList(jobStatus));
-                            saveAndPublishJobStatus(jobModel);
-                            //taskStatus.setState(TaskState.COMPLETED);
-                            //taskStatus.setReason("Submitted job to compute resource");
-                            //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-                            break;
-                        }
-                        logger.info("Verify step return invalid jobId, retry verification step in " + (verificationTryCount * 10) + " secs");
-                        Thread.sleep(verificationTryCount * 10000);
+                        logger.info("Job id " + verifyJobId + " verification succeeded");
+                        break;
                     }
+                    logger.info("Verify step return invalid jobId, retry verification step in " + (verificationTryCount * 10) + " secs");
+                    Thread.sleep(verificationTryCount * 10000);
                 }
+            }
 
-                if (jobId == null || jobId.isEmpty()) {
-                    jobModel.setJobId(DEFAULT_JOB_ID);
-                    saveJobModel(jobModel);
-                    String msg = "expId:" + getExperimentId() + " Couldn't find " +
-                            "remote jobId for JobName:" + jobModel.getJobName() + ", both submit and verify steps " +
-                            "doesn't return a valid JobId. " + "Hence changing experiment state to Failed";
-                    logger.error(msg);
-                    ErrorModel errorModel = new ErrorModel();
-                    errorModel.setUserFriendlyMessage(msg);
-                    errorModel.setActualErrorMessage(msg);
-                    saveExperimentError(errorModel);
-                    saveProcessError(errorModel);
-                    saveTaskError(errorModel);
-                    //taskStatus.setState(TaskState.FAILED);
-                    //taskStatus.setReason("Couldn't find job id in both submitted and verified steps");
-                    //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-                    return onFail("Couldn't find job id in both submitted and verified steps", false, null);
-                } else {
-                    //GFacUtils.saveJobModel(processContext, jobModel);
-                }
-
-            }  else {
-                return onFail("Job data is null", true, null);
-                //  taskStatus.setReason("JobFile is null");
-                //taskStatus.setState(TaskState.FAILED);
+            if (jobId == null || jobId.isEmpty()) {
+                jobModel.setJobId(DEFAULT_JOB_ID);
+                saveJobModel(jobModel);
+                String msg = "expId:" + getExperimentId() + " Couldn't find " +
+                        "remote jobId for JobName:" + jobModel.getJobName() + ", both submit and verify steps " +
+                        "doesn't return a valid JobId. " + "Hence changing experiment state to Failed";
+                logger.error(msg);
+                return onFail("Couldn't find job id in both submitted and verified steps. " + msg, false, null);
+            } else {
+                return onSuccess("Submitted job to compute resource after retry");
             }
+
         } catch (Exception e) {
             return onFail("Task failed due to unexpected issue", false, e);
         }
-        // TODO get rid of this
-        return onFail("Task moved to an unknown state", false, null);
     }
 
     private boolean verifyJobSubmissionByJobId(AgentAdaptor agentAdaptor, String jobID) throws Exception {
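The verification branch above retries up to three times with a growing delay before declaring the submission lost. Stripped of the Airavata specifics, the control flow looks roughly like the sketch below; verifyJob is a hypothetical stub, and the demo sleeps milliseconds where the task sleeps seconds.

    import java.util.concurrent.TimeUnit;

    public class VerificationRetry {

        // Hypothetical stub for the remote "did the job really get queued?" check.
        static String verifyJob(int attempt) {
            return attempt < 3 ? "" : "JOB-12345";
        }

        public static void main(String[] args) throws InterruptedException {
            String jobId = null;
            int tryCount = 0;
            while (tryCount++ < 3) {
                String verified = verifyJob(tryCount);
                if (verified != null && !verified.isEmpty()) {
                    jobId = verified;  // verification succeeded, stop retrying
                    break;
                }
                // Linear backoff between attempts; the task above waits tryCount * 10 seconds,
                // shortened to milliseconds so the sketch finishes quickly.
                TimeUnit.MILLISECONDS.sleep(tryCount * 10L);
            }
            System.out.println(jobId == null ? "verification failed" : "job id: " + jobId);
        }
    }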
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
index 06ce0ea..d9415ac 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
@@ -12,12 +12,17 @@ import org.apache.airavata.model.job.JobModel;
 import org.apache.airavata.model.status.JobState;
 import org.apache.airavata.model.status.JobStatus;
 import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
 
-import java.util.Arrays;
+import java.util.Collections;
 
 @TaskDef(name = "Fork Job Submission")
+@SuppressWarnings("unused")
 public class ForkJobSubmissionTask extends JobSubmissionTask {
 
+    private static final Logger logger = LogManager.getLogger(ForkJobSubmissionTask.class);
+
     @Override
     public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
 
@@ -31,45 +36,44 @@ public class ForkJobSubmissionTask extends JobSubmissionTask {
             jobModel.setTaskId(getTaskId());
             jobModel.setJobName(mapData.getJobName());
 
-            if (mapData != null) {
-                //jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
-                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
-                        getTaskContext().getGatewayId(),
-                        getTaskContext().getComputeResourceId(),
-                        getTaskContext().getJobSubmissionProtocol().name(),
-                        getTaskContext().getComputeResourceCredentialToken(),
-                        getTaskContext().getComputeResourceLoginUserName());
-
-                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, mapData.getWorkingDirectory());
-
-                jobModel.setExitCode(submissionOutput.getExitCode());
-                jobModel.setStdErr(submissionOutput.getStdErr());
-                jobModel.setStdOut(submissionOutput.getStdOut());
-
-                String jobId = submissionOutput.getJobId();
-
-                if (jobId != null && !jobId.isEmpty()) {
-                    jobModel.setJobId(jobId);
-                    saveJobModel(jobModel);
-                    JobStatus jobStatus = new JobStatus();
-                    jobStatus.setJobState(JobState.SUBMITTED);
-                    jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
-                    jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
-                    jobModel.setJobStatuses(Arrays.asList(jobStatus));
-                    saveAndPublishJobStatus(jobModel);
-
-                    return null;
-                } else {
-                    String msg = "expId:" + getExperimentId() + " Couldn't find remote jobId for JobName:" +
-                            jobModel.getJobName() + ", both submit and verify steps doesn't return a valid JobId. " +
-                            "Hence changing experiment state to Failed";
-                }
-
+            AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                    getTaskContext().getGatewayId(),
+                    getTaskContext().getComputeResourceId(),
+                    getTaskContext().getJobSubmissionProtocol().name(),
+                    getTaskContext().getComputeResourceCredentialToken(),
+                    getTaskContext().getComputeResourceLoginUserName());
+
+            JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, mapData.getWorkingDirectory());
+
+            jobModel.setJobDescription(submissionOutput.getDescription());
+            jobModel.setExitCode(submissionOutput.getExitCode());
+            jobModel.setStdErr(submissionOutput.getStdErr());
+            jobModel.setStdOut(submissionOutput.getStdOut());
+
+            String jobId = submissionOutput.getJobId();
+
+            if (jobId != null && !jobId.isEmpty()) {
+                jobModel.setJobId(jobId);
+                saveJobModel(jobModel);
+                JobStatus jobStatus = new JobStatus();
+                jobStatus.setJobState(JobState.SUBMITTED);
+                jobStatus.setReason("Successfully Submitted to " + getComputeResourceDescription().getHostName());
+                jobStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+                jobModel.setJobStatuses(Collections.singletonList(jobStatus));
+                saveAndPublishJobStatus(jobModel);
+
+                return onSuccess("Job submitted successfully");
+            } else {
+                String msg = "expId:" + getExperimentId() + " Couldn't find remote jobId for JobName:" +
+                        jobModel.getJobName() + ", both submit and verify steps doesn't return a valid JobId. " +
+                        "Hence changing experiment state to Failed";
+
+                return onFail(msg, true, null);
             }
-            return null;
 
         } catch (Exception e) {
-            return null;
+            logger.error("Unknown error while submitting job", e);
+            return onFail("Unknown error while submitting job", true, e);
         }
     }
 
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
index 7bf5034..a204ee1 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
@@ -52,23 +52,32 @@ public abstract class JobSubmissionTask extends AiravataTask {
         }
     }
 
+    @SuppressWarnings("WeakerAccess")
     public CuratorFramework getCuratorClient() {
         return curatorClient;
     }
 
     // TODO perform exception handling
+    @SuppressWarnings("WeakerAccess")
     protected void createMonitoringNode(String jobId) throws Exception {
         logger.info("Creating zookeeper paths for job monitoring for job id : " + jobId + ", process : "
                 + getProcessId() + ", gateway : " + getGatewayId());
-        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/lock", new byte[0]);
-        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/gateway", getGatewayId().getBytes());
-        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/process", getProcessId().getBytes());
-        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/task", getTaskId().getBytes());
-        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/experiment", getExperimentId().getBytes());
-        this.curatorClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/monitoring/" + jobId + "/status", "pending".getBytes());
+        getCuratorClient().create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(
+                "/monitoring/" + jobId + "/lock", new byte[0]);
+        getCuratorClient().create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(
+                "/monitoring/" + jobId + "/gateway", getGatewayId().getBytes());
+        getCuratorClient().create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(
+                "/monitoring/" + jobId + "/process", getProcessId().getBytes());
+        getCuratorClient().create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(
+                "/monitoring/" + jobId + "/task", getTaskId().getBytes());
+        getCuratorClient().create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(
+                "/monitoring/" + jobId + "/experiment", getExperimentId().getBytes());
+        getCuratorClient().create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(
+                "/monitoring/" + jobId + "/status", "pending".getBytes());
     }
 
     //////////////////////
+    @SuppressWarnings("WeakerAccess")
     protected JobSubmissionOutput submitBatchJob(AgentAdaptor agentAdaptor, GroovyMapData groovyMapData, String workingDirectory) throws Exception {
         JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
                 getAppCatalog(), getTaskContext().getJobSubmissionProtocol(), getTaskContext().getPreferredJobSubmissionInterface()));
@@ -94,6 +103,7 @@ public abstract class JobSubmissionTask extends AiravataTask {
         CommandOutput commandOutput = agentAdaptor.executeCommand(submitCommand.getRawCommand(), workingDirectory);
 
         JobSubmissionOutput jsoutput = new JobSubmissionOutput();
+        jsoutput.setDescription(scriptAsString);
 
         jsoutput.setJobId(jobManagerConfiguration.getParser().parseJobSubmission(commandOutput.getStdOut()));
         if (jsoutput.getJobId() == null) {
@@ -114,12 +124,14 @@ public abstract class JobSubmissionTask extends AiravataTask {
         return jsoutput;
     }
 
+    @SuppressWarnings("WeakerAccess")
     public File getLocalDataDir() {
         String outputPath = ServerSettings.getLocalDataLocation();
         outputPath = (outputPath.endsWith(File.separator) ? outputPath : outputPath + File.separator);
         return new File(outputPath + getProcessId());
     }
 
+    @SuppressWarnings("WeakerAccess")
     public JobStatus getJobStatus(AgentAdaptor agentAdaptor, String jobID) throws Exception {
         JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
                 getAppCatalog(), getTaskContext().getJobSubmissionProtocol(), getTaskContext().getPreferredJobSubmissionInterface()));
@@ -129,6 +141,7 @@ public abstract class JobSubmissionTask extends AiravataTask {
 
     }
 
+    @SuppressWarnings("WeakerAccess")
     public String getJobIdByJobName(AgentAdaptor agentAdaptor, String jobName, String userName) throws Exception {
         JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
                 getAppCatalog(), getTaskContext().getJobSubmissionProtocol(), getTaskContext().getPreferredJobSubmissionInterface()));
@@ -138,16 +151,22 @@ public abstract class JobSubmissionTask extends AiravataTask {
         return jobManagerConfiguration.getParser().parseJobId(jobName, commandOutput.getStdOut());
     }
 
+    @SuppressWarnings("WeakerAccess")
     public void saveJobModel(JobModel jobModel) throws RegistryException {
         getExperimentCatalog().add(ExpCatChildDataType.JOB, jobModel, getProcessId());
     }
 
+    @SuppressWarnings("WeakerAccess")
     public void saveAndPublishJobStatus(JobModel jobModel) throws Exception {
         try {
             // first we save the jobModel to the registry and then save the job status.
-            JobStatus jobStatus = null;
-            if(jobModel.getJobStatuses() != null)
+            JobStatus jobStatus;
+            if (jobModel.getJobStatuses() != null && jobModel.getJobStatuses().size() > 0) {
                 jobStatus = jobModel.getJobStatuses().get(0);
+            } else {
+                logger.error("Job statuses can not be empty");
+                return;
+            }
 
             List<JobStatus> statuses = new ArrayList<>();
             statuses.add(jobStatus);
@@ -173,7 +192,4 @@ public abstract class JobSubmissionTask extends AiravataTask {
             throw new Exception("Error persisting job status " + e.getLocalizedMessage(), e);
         }
     }
-
-    ///////////// required for groovy map
-
 }
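
For reference, the /monitoring/<jobId>/... entries written by createMonitoringNode() are ordinary persistent ZNodes, so they can be created and read back with stock Curator calls. A self-contained sketch; the connection string and job id below are assumptions for illustration:

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.ExponentialBackoffRetry;
    import org.apache.zookeeper.CreateMode;

    public class MonitoringNodeSketch {
        public static void main(String[] args) throws Exception {
            // "localhost:2181" and the job id are illustrative assumptions
            CuratorFramework client = CuratorFrameworkFactory.newClient(
                    "localhost:2181", new ExponentialBackoffRetry(1000, 3));
            client.start();

            String jobId = "1234567";
            client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT)
                    .forPath("/monitoring/" + jobId + "/status", "pending".getBytes());

            // later, a monitor (e.g. the post workflow manager) reads the same node back
            String status = new String(client.getData().forPath("/monitoring/" + jobId + "/status"));
            System.out.println("status = " + status);

            client.close();
        }
    }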
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
index 225f81d..e2af339 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
@@ -41,15 +41,11 @@ import org.apache.log4j.Logger;
 import org.apache.zookeeper.data.Stat;
 
 import java.util.*;
-import java.util.stream.Collectors;
 
 public class PostWorkflowManager {
 
     private static final Logger logger = LogManager.getLogger(PostWorkflowManager.class);
 
-    //private final String BOOTSTRAP_SERVERS = "localhost:9092";
-    //private final String TOPIC = "parsed-data";
-
     private CuratorFramework curatorClient = null;
     private Publisher statusPublisher;
 
@@ -66,7 +62,7 @@ public class PostWorkflowManager {
         props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
         props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JobStatusResultDeserializer.class.getName());
         // Create the consumer using props.
-        final Consumer<String, JobStatusResult> consumer = new KafkaConsumer<String, JobStatusResult>(props);
+        final Consumer<String, JobStatusResult> consumer = new KafkaConsumer<>(props);
         // Subscribe to the topic.
         consumer.subscribe(Collections.singletonList(ServerSettings.getSetting("kafka.broker.topic")));
         return consumer;
@@ -74,32 +70,27 @@ public class PostWorkflowManager {
 
     private String getExperimentIdByJobId(String jobId) throws Exception {
         byte[] processBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/experiment");
-        String process = new String(processBytes);
-        return process;
+        return new String(processBytes);
     }
 
     private String getTaskIdByJobId(String jobId) throws Exception {
         byte[] processBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/task");
-        String process = new String(processBytes);
-        return process;
+        return new String(processBytes);
     }
 
     private String getProcessIdByJobId(String jobId) throws Exception {
         byte[] processBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/process");
-        String process = new String(processBytes);
-        return process;
+        return new String(processBytes);
     }
 
     private String getGatewayByJobId(String jobId) throws Exception {
         byte[] gatewayBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/gateway");
-        String gateway = new String(gatewayBytes);
-        return gateway;
+        return new String(gatewayBytes);
     }
 
     private String getStatusByJobId(String jobId) throws Exception {
         byte[] statusBytes = this.curatorClient.getData().forPath("/monitoring/" + jobId + "/status");
-        String status = new String(statusBytes);
-        return status;
+        return new String(statusBytes);
     }
 
     private boolean hasMonitoringRegistered(String jobId) throws Exception {
@@ -128,7 +119,7 @@ public class PostWorkflowManager {
 
                 // TODO get cluster lock before that
                 if ("cancelled".equals(status)) {
-
+                    // TODO to be implemented
                 } else {
 
                     saveAndPublishJobStatus(jobStatusResult.getJobId(), task, processId, experimentId, gateway, jobStatusResult.getState());
@@ -190,7 +181,7 @@ public class PostWorkflowManager {
                                 ServerSettings.getZookeeperConnection());
 
                         workflowManager.launchWorkflow(processId + "-POST-" + UUID.randomUUID().toString(),
-                                allTasks.stream().map(t -> (AiravataTask) t).collect(Collectors.toList()), true, false);
+                                new ArrayList<>(allTasks), true, false);
 
                     } else if (jobStatusResult.getState() == JobState.CANCELED) {
                         logger.info("Job " + jobStatusResult.getJobId() + " was externally cancelled");
@@ -224,7 +215,7 @@ public class PostWorkflowManager {
         }
     }
 
-    public void saveAndPublishJobStatus(String jobId, String taskId, String processId, String experimentId, String gateway,
+    private void saveAndPublishJobStatus(String jobId, String taskId, String processId, String experimentId, String gateway,
                                         JobState jobState) throws Exception {
         try {
 
@@ -255,7 +246,7 @@ public class PostWorkflowManager {
         }
     }
 
-    public Publisher getStatusPublisher() throws AiravataException {
+    private Publisher getStatusPublisher() throws AiravataException {
         if (statusPublisher == null) {
             synchronized (RabbitMQPublisher.class) {
                 if (statusPublisher == null) {
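
The consumer built in createConsumer() is a plain KafkaConsumer, so driving it is the usual subscribe-and-poll loop. A minimal sketch using String values instead of the JobStatusResult deserializer; the broker address, group id, topic and the poll(long) call are assumptions about the kafka-clients version in use:

    import org.apache.kafka.clients.consumer.Consumer;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    import java.util.Collections;
    import java.util.Properties;

    public class StatusConsumerSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");   // assumed broker
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "MonitoringConsumer");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

            try (Consumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList("parsed-data"));       // assumed topic
                while (true) {
                    for (ConsumerRecord<String, String> record : consumer.poll(1000)) {
                        System.out.println(record.key() + " -> " + record.value());
                    }
                }
            }
        }
    }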
diff --git a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
index 18a6627..383e0b0 100644
--- a/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
+++ b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.workflow;
 import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.ThriftUtils;
+import org.apache.airavata.helix.core.AbstractTask;
 import org.apache.airavata.helix.core.OutPort;
 import org.apache.airavata.helix.impl.task.AiravataTask;
 import org.apache.airavata.helix.impl.task.env.EnvSetupTask;
@@ -36,6 +37,7 @@ public class PreWorkflowManager {
 
     private final Subscriber subscriber;
 
+    @SuppressWarnings("WeakerAccess")
     public PreWorkflowManager() throws AiravataException {
         List<String> routingKeys = new ArrayList<>();
         routingKeys.add(ServerSettings.getRabbitmqProcessExchangeName());
@@ -94,7 +96,7 @@ public class PreWorkflowManager {
                 ServerSettings.getSetting("post.workflow.manager.name"),
                 ServerSettings.getZookeeperConnection());
         String workflowName = workflowManager.launchWorkflow(processId + "-PRE-" + UUID.randomUUID().toString(),
-                allTasks.stream().map(t -> (AiravataTask) t).collect(Collectors.toList()), true, false);
+                new ArrayList<>(allTasks), true, false);
         return workflowName;
     }
 
diff --git a/modules/airavata-helix/helix-spectator/src/main/resources/airavata-server.properties b/modules/airavata-helix/helix-spectator/src/main/resources/airavata-server.properties
index 19b3b3d..e412896 100644
--- a/modules/airavata-helix/helix-spectator/src/main/resources/airavata-server.properties
+++ b/modules/airavata-helix/helix-spectator/src/main/resources/airavata-server.properties
@@ -45,18 +45,6 @@ jpa.cache.size=-1
 #jpa.connection.properties=MaxActive=10,MaxIdle=5,MinIdle=2,MaxWait=60000,testWhileIdle=true,testOnBorrow=true
 enable.sharing=true
 
-# Properties for default user mode
-default.registry.user=default-admin
-default.registry.password=123456
-default.registry.password.hash.method=SHA
-default.registry.gateway=default
-super.tenant.gatewayId=default
-
-# Properties for cluster status monitoring
-# cluster status monitoring job repeat time in seconds
-cluster.status.monitoring.enable=false
-cluster.status.monitoring.repeat.time=18000
-
 ###########################################################################
 #  Application Catalog DB Configuration
 ###########################################################################
@@ -84,19 +72,6 @@ replicacatalog.jdbc.password=eroma123456
 replicacatalog.validationQuery=SELECT 1 from CONFIGURATION
 
 ###########################################################################
-#  Workflow Catalog DB Configuration
-###########################################################################
-#for derby [AiravataJPARegistry]
-#workflowcatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
-#workflowcatalog.jdbc.url=jdbc:derby://localhost:1527/workflow_catalog;create=true;user=airavata;password=airavata
-# MariaDB database configuration
-workflowcatalog.jdbc.driver=org.mariadb.jdbc.Driver
-workflowcatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/replica_catalog
-workflowcatalog.jdbc.user=eroma
-workflowcatalog.jdbc.password=eroma123456
-workflowcatalog.validationQuery=SELECT 1 from CONFIGURATION
-
-###########################################################################
 #  Sharing Catalog DB Configuration
 ###########################################################################
 #for derby [AiravataJPARegistry]
@@ -117,21 +92,6 @@ sharing.registry.server.host=192.168.99.102
 sharing.registry.server.port=7878
 
 ###########################################################################
-#  User Profile MongoDB Configuration
-###########################################################################
-userprofile.mongodb.host=localhost
-userprofile.mongodb.port=27017
-
-
-###########################################################################
-#  Server module Configuration
-###########################################################################
-#credential store server should be started before API server
-#This is obsolete property with new script files.
-#servers=credentialstore,apiserver,orchestrator
-
-
-###########################################################################
 #  API Server Configurations
 ###########################################################################
 apiserver=org.apache.airavata.api.server.AiravataAPIServer
@@ -141,21 +101,6 @@ apiserver.port=8930
 apiserver.min.threads=50
 
 ###########################################################################
-#  Orchestrator Server Configurations
-###########################################################################
-orchestrator=org.apache.airavata.orchestrator.server.OrchestratorServer
-orchestrator.server.name=orchestrator-node0
-orchestrator.server.host=192.168.99.102
-orchestrator.server.port=8940
-orchestrator.server.min.threads=50
-job.validators=org.apache.airavata.orchestrator.core.validator.impl.BatchQueueValidator,org.apache.airavata.orchestrator.core.validator.impl.ExperimentStatusValidator
-submitter.interval=10000
-threadpool.size=10
-start.submitter=true
-embedded.mode=true
-enable.validation=true
-
-###########################################################################
 #  Registry Server Configurations
 ###########################################################################
 regserver=org.apache.airavata.registry.api.service.RegistryAPIServer
@@ -164,28 +109,6 @@ regserver.server.host=192.168.99.102
 regserver.server.port=8970
 regserver.server.min.threads=50
 
-###########################################################################
-#  GFac Server Configurations
-###########################################################################
-gfac=org.apache.airavata.gfac.server.GfacServer
-gfac.server.name=gfac-node0
-gfac.server.host=10.0.2.15
-gfac.server.port=8950
-gfac.thread.pool.size=50
-host.scheduler=org.apache.airavata.gfac.impl.DefaultHostScheduler
-
-
-
-###########################################################################
-# Airavata Workflow Interpreter Configurations
-###########################################################################
-workflowserver=org.apache.airavata.api.server.WorkflowServer
-enactment.thread.pool.size=10
-
-#to define custom workflow parser user following property
-#workflow.parser=org.apache.airavata.workflow.core.parser.AiravataWorkflowBuilder
-
-
 
 ###########################################################################
 #  Job Scheduler can send informative email messages to you about the status of your job.
@@ -269,6 +192,8 @@ kafka.broker.consumer.group=MonitoringConsumer
 helix.cluster.name=AiravataDemoCluster
 pre.workflow.manager.name=prewm
 post.workflow.manager.name=postwm
+helix.controller.name=helixcontroller
+helix.participant.name=helixparticipant
 
 ###########################################################################
 # AMQP Notification Configuration
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
index a7e5a64..029da29 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
@@ -1,5 +1,7 @@
 package org.apache.airavata.helix.core.participant;
 
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.helix.core.support.TaskHelperImpl;
 import org.apache.airavata.helix.core.AbstractTask;
 import org.apache.airavata.helix.core.util.PropertyResolver;
@@ -44,20 +46,13 @@ public class HelixParticipant <T extends AbstractTask> implements Runnable {
     private PropertyResolver propertyResolver;
     private Class<T> taskClass;
 
-    public HelixParticipant(String propertyFile, Class<T> taskClass, String taskTypeName, boolean readPropertyFromFile) throws IOException {
+    public HelixParticipant(Class<T> taskClass, String taskTypeName) throws ApplicationSettingsException {
 
         logger.info("Initializing Participant Node");
 
-        this.propertyResolver = new PropertyResolver();
-        if (readPropertyFromFile) {
-            propertyResolver.loadFromFile(new File(propertyFile));
-        } else {
-            propertyResolver.loadInputStream(this.getClass().getClassLoader().getResourceAsStream(propertyFile));
-        }
-
-        this.zkAddress = propertyResolver.get("zookeeper.connection.url");
-        this.clusterName = propertyResolver.get("helix.cluster.name");
-        this.participantName = propertyResolver.get("participant.name");
+        this.zkAddress = ServerSettings.getZookeeperConnection();
+        this.clusterName = ServerSettings.getSetting("helix.cluster.name");
+        this.participantName = ServerSettings.getSetting("helix.participant.name");
         this.taskTypeName = taskTypeName;
         this.taskClass = taskClass;
 

[airavata] 02/17: Building groovy map

Posted by di...@apache.org.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit ef94a5afd073359b0d75b72f51b254670953b7e2
Author: dimuthu <di...@gmail.com>
AuthorDate: Thu Feb 22 09:37:36 2018 -0500

    Building groovy map
---
 modules/helix-spectator/pom.xml                    |   5 +
 .../airavata/helix/impl/task/AiravataTask.java     | 156 +---
 .../airavata/helix/impl/task/EnvSetupTask.java     |   6 +-
 .../airavata/helix/impl/task/TaskContext.java      | 802 +++++++++++++++++++++
 .../impl/task/submission/GroovyMapBuilder.java     | 335 +++++++++
 .../helix/impl/task/submission/GroovyMapData.java  |  51 +-
 .../submission/task/DefaultJobSubmissionTask.java  |  37 +-
 .../submission/task/ForkJobSubmissionTask.java     |  21 +-
 .../task/submission/task/JobSubmissionTask.java    |  69 +-
 .../submission/task/LocalJobSubmissionTask.java    |  10 +-
 .../helix/impl/workflow/SimpleWorkflow.java        |   2 +-
 .../src/main/resources/application.properties      |   2 +-
 12 files changed, 1275 insertions(+), 221 deletions(-)

diff --git a/modules/helix-spectator/pom.xml b/modules/helix-spectator/pom.xml
index bae2785..36fb586 100644
--- a/modules/helix-spectator/pom.xml
+++ b/modules/helix-spectator/pom.xml
@@ -45,6 +45,11 @@
             <artifactId>mariadb-java-client</artifactId>
             <version>1.1.7</version>
         </dependency>
+        <dependency>
+            <groupId>org.codehaus.groovy</groupId>
+            <artifactId>groovy-templates</artifactId>
+            <version>2.4.7</version>
+        </dependency>
 
     </dependencies>
 </project>
\ No newline at end of file
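
The groovy-templates dependency added here is what allows a map of job properties (the "groovy map") to be rendered into a batch script. A minimal sketch of that idea using Groovy's SimpleTemplateEngine from Java; the template text and binding keys are made up for illustration and are not the repository's real GroovyMapData fields:

    import groovy.text.SimpleTemplateEngine;

    import java.util.HashMap;
    import java.util.Map;

    public class GroovyTemplateSketch {
        public static void main(String[] args) throws Exception {
            // Illustrative template; real job scripts come from resource-specific templates.
            String template = "#!/bin/bash\n"
                    + "#SBATCH -J ${jobName}\n"
                    + "cd ${workingDirectory}\n";

            Map<String, Object> binding = new HashMap<>();
            binding.put("jobName", "demo-job");             // assumed keys and values
            binding.put("workingDirectory", "/tmp/demo");

            String script = new SimpleTemplateEngine()
                    .createTemplate(template)
                    .make(binding)
                    .toString();

            System.out.println(script);
        }
    }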
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
index 72d3e17..315c07c 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
@@ -39,10 +39,8 @@ public abstract class AiravataTask extends AbstractTask {
     private ProcessModel processModel;
 
     private ComputeResourceDescription computeResourceDescription;
-    private ComputeResourcePreference gatewayComputeResourcePreference;
-    private UserComputeResourcePreference userComputeResourcePreference;
-    private UserResourceProfile userResourceProfile;
-    private GatewayResourceProfile gatewayResourceProfile;
+
+    private TaskContext taskContext;
 
     @TaskParam(name = "Process Id")
     private String processId;
@@ -87,22 +85,28 @@ public abstract class AiravataTask extends AbstractTask {
 
             this.computeResourceDescription = getAppCatalog().getComputeResource().getComputeResource(getProcessModel()
                     .getComputeResourceId());
-            this.gatewayComputeResourcePreference = getAppCatalog().getGatewayProfile()
-                    .getComputeResourcePreference(getGatewayId(), computeResourceDescription.getComputeResourceId());
-
-            this.userComputeResourcePreference = getAppCatalog().getUserResourceProfile()
-                    .getUserComputeResourcePreference(getProcessModel().getUserName(), getGatewayId(), getProcessModel()
-                            .getComputeResourceId());
-
-            this.userResourceProfile = getAppCatalog().getUserResourceProfile()
-                    .getUserResourceProfile(getProcessModel().getUserName(), getGatewayId());
-
-            this.gatewayResourceProfile = getAppCatalog().getGatewayProfile().getGatewayProfile(getGatewayId());
 
+            TaskContext.TaskContextBuilder taskContextBuilder = new TaskContext.TaskContextBuilder(getProcessId(), getGatewayId(), getTaskId());
+            taskContextBuilder.setAppCatalog(getAppCatalog());
+            taskContextBuilder.setExperimentCatalog(getExperimentCatalog());
+            taskContextBuilder.setProcessModel(getProcessModel());
+            taskContextBuilder.setStatusPublisher(getStatusPublisher());
+
+            taskContextBuilder.setGatewayResourceProfile(appCatalog.getGatewayProfile().getGatewayProfile(gatewayId));
+            taskContextBuilder.setGatewayComputeResourcePreference(
+                            appCatalog.getGatewayProfile()
+                                    .getComputeResourcePreference(gatewayId, processModel.getComputeResourceId()));
+            taskContextBuilder.setGatewayStorageResourcePreference(
+                            appCatalog.getGatewayProfile()
+                                    .getStoragePreference(gatewayId, processModel.getStorageResourceId()));
+
+            this.taskContext = taskContextBuilder.build();
         } catch (AppCatalogException e) {
             e.printStackTrace();
         } catch (RegistryException e) {
             e.printStackTrace();
+        } catch (Exception e) {
+            e.printStackTrace();
         }
     }
 
@@ -125,140 +129,27 @@ public abstract class AiravataTask extends AbstractTask {
         msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
     }
 
-
-    ///////////////////
-
-    public String getComputeResourceId() {
-        if (isUseUserCRPref() &&
-                userComputeResourcePreference != null &&
-                isValid(userComputeResourcePreference.getComputeResourceId())) {
-            return userComputeResourcePreference.getComputeResourceId();
-        } else {
-            return gatewayComputeResourcePreference.getComputeResourceId();
-        }
-    }
-
-    public String getComputeResourceCredentialToken(){
-        if (isUseUserCRPref()) {
-            if (userComputeResourcePreference != null &&
-                    isValid(userComputeResourcePreference.getResourceSpecificCredentialStoreToken())) {
-                return userComputeResourcePreference.getResourceSpecificCredentialStoreToken();
-            } else {
-                return userResourceProfile.getCredentialStoreToken();
-            }
-        } else {
-            if (isValid(gatewayComputeResourcePreference.getResourceSpecificCredentialStoreToken())) {
-                return gatewayComputeResourcePreference.getResourceSpecificCredentialStoreToken();
-            } else {
-                return gatewayResourceProfile.getCredentialStoreToken();
-            }
-        }
-    }
-
-    public String getComputeResourceLoginUserName(){
-        if (isUseUserCRPref() &&
-                userComputeResourcePreference != null &&
-                isValid(userComputeResourcePreference.getLoginUserName())) {
-            return userComputeResourcePreference.getLoginUserName();
-        } else if (isValid(getProcessModel().getProcessResourceSchedule().getOverrideLoginUserName())) {
-            return getProcessModel().getProcessResourceSchedule().getOverrideLoginUserName();
-        } else {
-            return gatewayComputeResourcePreference.getLoginUserName();
-        }
-    }
-
-    public JobSubmissionInterface getPreferredJobSubmissionInterface() throws AppCatalogException {
-        try {
-            JobSubmissionProtocol preferredJobSubmissionProtocol = getJobSubmissionProtocol();
-            ComputeResourceDescription resourceDescription = getComputeResourceDescription();
-            List<JobSubmissionInterface> jobSubmissionInterfaces = resourceDescription.getJobSubmissionInterfaces();
-            Map<JobSubmissionProtocol, List<JobSubmissionInterface>> orderedInterfaces = new HashMap<>();
-            List<JobSubmissionInterface> interfaces = new ArrayList<>();
-            if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()) {
-                for (JobSubmissionInterface submissionInterface : jobSubmissionInterfaces){
-
-                    if (preferredJobSubmissionProtocol != null){
-                        if (preferredJobSubmissionProtocol.toString().equals(submissionInterface.getJobSubmissionProtocol().toString())){
-                            if (orderedInterfaces.containsKey(submissionInterface.getJobSubmissionProtocol())){
-                                List<JobSubmissionInterface> interfaceList = orderedInterfaces.get(submissionInterface.getJobSubmissionProtocol());
-                                interfaceList.add(submissionInterface);
-                            }else {
-                                interfaces.add(submissionInterface);
-                                orderedInterfaces.put(submissionInterface.getJobSubmissionProtocol(), interfaces);
-                            }
-                        }
-                    }else {
-                        Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
-                            @Override
-                            public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
-                                return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
-                            }
-                        });
-                    }
-                }
-                interfaces = orderedInterfaces.get(preferredJobSubmissionProtocol);
-                Collections.sort(interfaces, new Comparator<JobSubmissionInterface>() {
-                    @Override
-                    public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
-                        return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
-                    }
-                });
-            } else {
-                throw new AppCatalogException("Compute resource should have at least one job submission interface defined...");
-            }
-            return interfaces.get(0);
-        } catch (AppCatalogException e) {
-            throw new AppCatalogException("Error occurred while retrieving data from app catalog", e);
-        }
-    }
-
     //////////////////////////
 
-
-    protected boolean isValid(String str) {
-        return str != null && !str.trim().isEmpty();
-    }
-
-    public boolean isUseUserCRPref() {
-        return getProcessModel().isUseUserCRPref();
-    }
-
-    public JobSubmissionProtocol getJobSubmissionProtocol() {
-        return getGatewayComputeResourcePreference().getPreferredJobSubmissionProtocol();
-    }
-
-    public ComputeResourcePreference getGatewayComputeResourcePreference() {
-        return gatewayComputeResourcePreference;
-    }
-
-
     public ComputeResourceDescription getComputeResourceDescription() {
         return computeResourceDescription;
     }
 
     ////////////////////////
 
-    
-    public void setAppCatalog(AppCatalog appCatalog) {
-        this.appCatalog = appCatalog;
+
+    public TaskContext getTaskContext() {
+        return taskContext;
     }
 
     public ExperimentCatalog getExperimentCatalog() {
         return experimentCatalog;
     }
 
-    public void setExperimentCatalog(ExperimentCatalog experimentCatalog) {
-        this.experimentCatalog = experimentCatalog;
-    }
-
     public Publisher getStatusPublisher() {
         return statusPublisher;
     }
 
-    public void setStatusPublisher(Publisher statusPublisher) {
-        this.statusPublisher = statusPublisher;
-    }
-
     public String getProcessId() {
         return processId;
     }
@@ -287,7 +178,4 @@ public abstract class AiravataTask extends AbstractTask {
         return processModel;
     }
 
-    public void setProcessModel(ProcessModel processModel) {
-        this.processModel = processModel;
-    }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
index 1cab0e2..f079b9f 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
@@ -23,8 +23,10 @@ public class EnvSetupTask extends AiravataTask {
     public TaskResult onRun(TaskHelper taskHelper) {
         try {
             publishTaskState(TaskState.EXECUTING);
-            AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(getComputeResourceId(),
-                    getJobSubmissionProtocol().name(), getComputeResourceCredentialToken());
+            AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                    getTaskContext().getComputeResourceId(),
+                    getTaskContext().getJobSubmissionProtocol().name(),
+                    getTaskContext().getComputeResourceCredentialToken());
 
             adaptor.createDirectory(workingDirectory);
             publishTaskState(TaskState.COMPLETED);
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
new file mode 100644
index 0000000..7de738e
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
@@ -0,0 +1,802 @@
+package org.apache.airavata.helix.impl.task;
+
+import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.common.utils.ThriftUtils;
+import org.apache.airavata.messaging.core.Publisher;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.*;
+import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
+import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
+import org.apache.airavata.model.appcatalog.gatewayprofile.StoragePreference;
+import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
+import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
+import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
+import org.apache.airavata.model.appcatalog.userresourceprofile.UserStoragePreference;
+import org.apache.airavata.model.data.movement.DataMovementProtocol;
+import org.apache.airavata.model.job.JobModel;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel;
+import org.apache.airavata.model.status.ProcessState;
+import org.apache.airavata.model.status.ProcessStatus;
+import org.apache.airavata.model.task.TaskModel;
+import org.apache.airavata.registry.cpi.AppCatalog;
+import org.apache.airavata.registry.cpi.AppCatalogException;
+import org.apache.airavata.registry.cpi.ExperimentCatalog;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+public class TaskContext {
+
+    private static final Logger log = LoggerFactory.getLogger(TaskContext.class);
+    // process model
+    private ExperimentCatalog experimentCatalog;
+    private AppCatalog appCatalog;
+    private Publisher statusPublisher;
+    private final String processId;
+    private final String gatewayId;
+    //private final String tokenId;
+    private ProcessModel processModel;
+    private String workingDir;
+    private String scratchLocation;
+    private String inputDir;
+    private String outputDir;
+    private String localWorkingDir;
+    private GatewayResourceProfile gatewayResourceProfile;
+    private ComputeResourcePreference gatewayComputeResourcePreference;
+    private StoragePreference gatewayStorageResourcePreference;
+    private UserResourceProfile userResourceProfile;
+    private UserComputeResourcePreference userComputeResourcePreference;
+    private UserStoragePreference userStoragePreference;
+    private ComputeResourceDescription computeResourceDescription;
+    private ApplicationDeploymentDescription applicationDeploymentDescription;
+    private ApplicationInterfaceDescription applicationInterfaceDescription;
+    private Map<String, String> sshProperties;
+    private String stdoutLocation;
+    private String stderrLocation;
+    private JobSubmissionProtocol jobSubmissionProtocol;
+    private DataMovementProtocol dataMovementProtocol;
+    private JobModel jobModel;
+    private StorageResourceDescription storageResource;
+    private MonitorMode monitorMode;
+    private ResourceJobManager resourceJobManager;
+    private boolean handOver;
+    private boolean cancel;
+    private List<String> taskExecutionOrder;
+    private List<TaskModel> taskList;
+    private Map<String, TaskModel> taskMap;
+    private boolean pauseTaskExecution = false;  // Task can pause task execution by setting this value
+    private boolean complete = false; // all tasks executed?
+    private boolean recovery = false; // is process in recovery mode?
+    private TaskModel currentExecutingTaskModel; // currently executing task model; needed to resume process execution if it is paused
+    private boolean acknowledge;
+    private boolean recoveryWithCancel = false;
+    private String usageReportingGatewayId;
+    private List<String> queueSpecificMacros;
+    private String taskId;
+    private Object subTaskModel = null;
+
+
+    /**
+     * Note: task context properties use a lazy loading approach. At runtime some properties will appear as null
+     * until they are accessed; once a property is accessed through the API, it is set to the correct value.
+     */
+    private TaskContext(String taskId, String processId, String gatewayId) {
+        this.processId = processId;
+        this.gatewayId = gatewayId;
+        this.taskId = taskId;
+    }
+
+    public ExperimentCatalog getExperimentCatalog() {
+        return experimentCatalog;
+    }
+
+    public void setExperimentCatalog(ExperimentCatalog experimentCatalog) {
+        this.experimentCatalog = experimentCatalog;
+    }
+
+    public AppCatalog getAppCatalog() {
+        return appCatalog;
+    }
+
+    public void setAppCatalog(AppCatalog appCatalog) {
+        this.appCatalog = appCatalog;
+    }
+
+    public String getGatewayId() {
+        return gatewayId;
+    }
+
+    public String getProcessId() {
+        return processId;
+    }
+
+    public Publisher getStatusPublisher() {
+        return statusPublisher;
+    }
+
+    public void setStatusPublisher(Publisher statusPublisher) {
+        this.statusPublisher = statusPublisher;
+    }
+
+    public ProcessModel getProcessModel() {
+        return processModel;
+    }
+
+    public void setProcessModel(ProcessModel processModel) {
+        this.processModel = processModel;
+    }
+
+    public String getWorkingDir() {
+        if (workingDir == null) {
+            if (processModel.getProcessResourceSchedule().getStaticWorkingDir() != null){
+                workingDir = processModel.getProcessResourceSchedule().getStaticWorkingDir();
+            }else {
+                String scratchLocation = getScratchLocation();
+                workingDir = (scratchLocation.endsWith("/") ? scratchLocation + processId : scratchLocation + "/" +
+                        processId);
+            }
+        }
+        return workingDir;
+    }
+
+    public String getScratchLocation() {
+        if (scratchLocation == null) {
+            if (isUseUserCRPref() &&
+                    userComputeResourcePreference != null &&
+                    isValid(userComputeResourcePreference.getScratchLocation())) {
+                scratchLocation = userComputeResourcePreference.getScratchLocation();
+            } else if (isValid(processModel.getProcessResourceSchedule().getOverrideScratchLocation())) {
+                scratchLocation = processModel.getProcessResourceSchedule().getOverrideScratchLocation();
+            }else {
+                scratchLocation = gatewayComputeResourcePreference.getScratchLocation();
+            }
+        }
+        return scratchLocation;
+    }
+
+    public void setWorkingDir(String workingDir) {
+        this.workingDir = workingDir;
+    }
+
+    public GatewayResourceProfile getGatewayResourceProfile() {
+        return gatewayResourceProfile;
+    }
+
+    public void setGatewayResourceProfile(GatewayResourceProfile gatewayResourceProfile) {
+        this.gatewayResourceProfile = gatewayResourceProfile;
+    }
+
+    public UserResourceProfile getUserResourceProfile() {
+        return userResourceProfile;
+    }
+
+    public void setUserResourceProfile(UserResourceProfile userResourceProfile) {
+        this.userResourceProfile = userResourceProfile;
+    }
+
+    private UserComputeResourcePreference getUserComputeResourcePreference() {
+        return userComputeResourcePreference;
+    }
+
+    public void setUserComputeResourcePreference(UserComputeResourcePreference userComputeResourcePreference) {
+        this.userComputeResourcePreference = userComputeResourcePreference;
+    }
+
+    public UserStoragePreference getUserStoragePreference() {
+        return userStoragePreference;
+    }
+
+    public void setUserStoragePreference(UserStoragePreference userStoragePreference) {
+        this.userStoragePreference = userStoragePreference;
+    }
+
+    public StoragePreference getGatewayStorageResourcePreference() {
+        return gatewayStorageResourcePreference;
+    }
+
+    public void setGatewayStorageResourcePreference(StoragePreference gatewayStorageResourcePreference) {
+        this.gatewayStorageResourcePreference = gatewayStorageResourcePreference;
+    }
+
+
+    public Map<String, String> getSshProperties() {
+        return sshProperties;
+    }
+
+    public void setSshProperties(Map<String, String> sshProperties) {
+        this.sshProperties = sshProperties;
+    }
+
+    public ComputeResourceDescription getComputeResourceDescription() {
+        return computeResourceDescription;
+    }
+
+    public void setComputeResourceDescription(ComputeResourceDescription computeResourceDescription) {
+        this.computeResourceDescription = computeResourceDescription;
+    }
+
+    public ApplicationDeploymentDescription getApplicationDeploymentDescription() {
+        return applicationDeploymentDescription;
+    }
+
+    public void setApplicationDeploymentDescription(ApplicationDeploymentDescription
+                                                            applicationDeploymentDescription) {
+        this.applicationDeploymentDescription = applicationDeploymentDescription;
+    }
+
+    public ApplicationInterfaceDescription getApplicationInterfaceDescription() {
+        return applicationInterfaceDescription;
+    }
+
+    public void setApplicationInterfaceDescription(ApplicationInterfaceDescription applicationInterfaceDescription) {
+        this.applicationInterfaceDescription = applicationInterfaceDescription;
+    }
+
+    public String getStdoutLocation() {
+        return stdoutLocation;
+    }
+
+    public void setStdoutLocation(String stdoutLocation) {
+        this.stdoutLocation = stdoutLocation;
+    }
+
+    public String getStderrLocation() {
+        return stderrLocation;
+    }
+
+    public void setStderrLocation(String stderrLocation) {
+        this.stderrLocation = stderrLocation;
+    }
+
+    public void setOutputDir(String outputDir) {
+        this.outputDir = outputDir;
+    }
+
+    public String getOutputDir() {
+        if (outputDir == null) {
+            outputDir = getWorkingDir();
+        }
+        return outputDir;
+    }
+
+    public String getInputDir() {
+        if (inputDir == null) {
+            inputDir = getWorkingDir();
+        }
+        return inputDir;
+    }
+
+    public void setInputDir(String inputDir) {
+        this.inputDir = inputDir;
+    }
+
+    public JobSubmissionProtocol getJobSubmissionProtocol() {
+        if (jobSubmissionProtocol == null) {
+            jobSubmissionProtocol = gatewayComputeResourcePreference.getPreferredJobSubmissionProtocol();
+        }
+        return jobSubmissionProtocol;
+    }
+
+    public void setJobSubmissionProtocol(JobSubmissionProtocol jobSubmissionProtocol) {
+        this.jobSubmissionProtocol = jobSubmissionProtocol;
+    }
+
+    public DataMovementProtocol getDataMovementProtocol() {
+        if (dataMovementProtocol == null) {
+            dataMovementProtocol = gatewayComputeResourcePreference.getPreferredDataMovementProtocol();
+        }
+        return dataMovementProtocol;
+    }
+
+    public void setDataMovementProtocol(DataMovementProtocol dataMovementProtocol) {
+        this.dataMovementProtocol = dataMovementProtocol;
+    }
+
+    public String getTaskDag() {
+        return getProcessModel().getTaskDag();
+    }
+
+    public List<TaskModel> getTaskList() {
+        if (taskList == null) {
+            synchronized (TaskModel.class){
+                if (taskList == null) {
+                    taskList = getProcessModel().getTasks();
+                }
+            }
+        }
+        return taskList;
+    }
+
+
+    public List<String> getTaskExecutionOrder() {
+        return taskExecutionOrder;
+    }
+
+    public void setTaskExecutionOrder(List<String> taskExecutionOrder) {
+        this.taskExecutionOrder = taskExecutionOrder;
+    }
+
+    public Map<String, TaskModel> getTaskMap() {
+        if (taskMap == null) {
+            synchronized (TaskModel.class) {
+                if (taskMap == null) {
+                    taskMap = new HashMap<>();
+                    for (TaskModel taskModel : getTaskList()) {
+                        taskMap.put(taskModel.getTaskId(), taskModel);
+                    }
+                }
+            }
+        }
+        return taskMap;
+    }
+
+    public JobModel getJobModel() {
+        if (jobModel == null) {
+            jobModel = new JobModel();
+            jobModel.setProcessId(processId);
+            jobModel.setWorkingDir(getWorkingDir());
+            jobModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
+        }
+        return jobModel;
+    }
+
+    public void setJobModel(JobModel jobModel) {
+        this.jobModel = jobModel;
+    }
+
+    public ComputeResourcePreference getGatewayComputeResourcePreference() {
+        return gatewayComputeResourcePreference;
+    }
+
+    public void setGatewayComputeResourcePreference(ComputeResourcePreference gatewayComputeResourcePreference) {
+        this.gatewayComputeResourcePreference = gatewayComputeResourcePreference;
+    }
+
+    public ProcessState getProcessState() {
+        if(processModel.getProcessStatuses() != null && processModel.getProcessStatuses().size() > 0)
+            return processModel.getProcessStatuses().get(0).getState();
+        else
+            return null;
+    }
+
+    public void setProcessStatus(ProcessStatus status) {
+        if (status != null) {
+            log.info("expId: {}, processId: {} :- Process status changed {} -> {}", getExperimentId(), processId,
+                    getProcessState().name(), status.getState().name());
+            List<ProcessStatus> processStatuses = new ArrayList<>();
+            processStatuses.add(status);
+            processModel.setProcessStatuses(processStatuses);
+        }
+    }
+
+    public ProcessStatus getProcessStatus(){
+        if(processModel.getProcessStatuses() != null)
+            return processModel.getProcessStatuses().get(0);
+        else
+            return null;
+    }
+
+    public String getComputeResourceId() {
+        if (isUseUserCRPref() &&
+                userComputeResourcePreference != null &&
+                isValid(userComputeResourcePreference.getComputeResourceId())) {
+            return userComputeResourcePreference.getComputeResourceId();
+        } else {
+            return gatewayComputeResourcePreference.getComputeResourceId();
+        }
+    }
+
+    public String getComputeResourceCredentialToken(){
+        if (isUseUserCRPref()) {
+            if (userComputeResourcePreference != null &&
+                    isValid(userComputeResourcePreference.getResourceSpecificCredentialStoreToken())) {
+                return userComputeResourcePreference.getResourceSpecificCredentialStoreToken();
+            } else {
+                return userResourceProfile.getCredentialStoreToken();
+            }
+        } else {
+            if (isValid(gatewayComputeResourcePreference.getResourceSpecificCredentialStoreToken())) {
+                return gatewayComputeResourcePreference.getResourceSpecificCredentialStoreToken();
+            } else {
+                return gatewayResourceProfile.getCredentialStoreToken();
+            }
+        }
+    }
+
+    public String getStorageResourceCredentialToken(){
+        if (isValid(gatewayStorageResourcePreference.getResourceSpecificCredentialStoreToken())) {
+            return gatewayStorageResourcePreference.getResourceSpecificCredentialStoreToken();
+        } else {
+            return gatewayResourceProfile.getCredentialStoreToken();
+        }
+    }
+
+    public JobSubmissionProtocol getPreferredJobSubmissionProtocol(){
+        return gatewayComputeResourcePreference.getPreferredJobSubmissionProtocol();
+    }
+
+    public DataMovementProtocol getPreferredDataMovementProtocol() {
+        return gatewayComputeResourcePreference.getPreferredDataMovementProtocol();
+    }
+
+    public void setMonitorMode(MonitorMode monitorMode) {
+        this.monitorMode = monitorMode;
+    }
+
+    public MonitorMode getMonitorMode() {
+        return monitorMode;
+    }
+
+    public void setResourceJobManager(ResourceJobManager resourceJobManager) {
+        this.resourceJobManager = resourceJobManager;
+    }
+
+    public ResourceJobManager getResourceJobManager() {
+        return resourceJobManager;
+    }
+
+    public String getLocalWorkingDir() {
+        return localWorkingDir;
+    }
+
+    public void setLocalWorkingDir(String localWorkingDir) {
+        this.localWorkingDir = localWorkingDir;
+    }
+
+    public String getExperimentId() {
+        return processModel.getExperimentId();
+    }
+
+    public boolean isHandOver() {
+        return handOver;
+    }
+
+    public void setHandOver(boolean handOver) {
+        this.handOver = handOver;
+    }
+
+    public boolean isCancel() {
+        return cancel;
+    }
+
+    public void setCancel(boolean cancel) {
+        this.cancel = cancel;
+    }
+
+    public boolean isInterrupted(){
+        return this.cancel || this.handOver;
+    }
+
+    public String getCurrentExecutingTaskId() {
+        if (currentExecutingTaskModel != null) {
+            return currentExecutingTaskModel.getTaskId();
+        }
+        return null;
+    }
+
+    public boolean isPauseTaskExecution() {
+        return pauseTaskExecution;
+    }
+
+    public void setPauseTaskExecution(boolean pauseTaskExecution) {
+        this.pauseTaskExecution = pauseTaskExecution;
+    }
+
+    public boolean isComplete() {
+        return complete;
+    }
+
+    public void setComplete(boolean complete) {
+        this.complete = complete;
+    }
+
+    public boolean isRecovery() {
+        return recovery;
+    }
+
+    public void setRecovery(boolean recovery) {
+        this.recovery = recovery;
+    }
+
+    public TaskModel getCurrentExecutingTaskModel() {
+        return currentExecutingTaskModel;
+    }
+
+    public void setCurrentExecutingTaskModel(TaskModel currentExecutingTaskModel) {
+        this.currentExecutingTaskModel = currentExecutingTaskModel;
+    }
+
+    public StorageResourceDescription getStorageResource() {
+        return storageResource;
+    }
+
+    public void setStorageResource(StorageResourceDescription storageResource) {
+        this.storageResource = storageResource;
+    }
+
+    public void setAcknowledge(boolean acknowledge) {
+        this.acknowledge = acknowledge;
+    }
+
+    public boolean isAcknowledge() {
+        return acknowledge;
+    }
+
+    public boolean isRecoveryWithCancel() {
+        return recoveryWithCancel;
+    }
+
+    public void setRecoveryWithCancel(boolean recoveryWithCancel) {
+        this.recoveryWithCancel = recoveryWithCancel;
+    }
+
+    public boolean isUseUserCRPref() {
+        return getProcessModel().isUseUserCRPref();
+    }
+
+    public String getComputeResourceLoginUserName(){
+        if (isUseUserCRPref() &&
+                userComputeResourcePreference != null &&
+                isValid(userComputeResourcePreference.getLoginUserName())) {
+            return userComputeResourcePreference.getLoginUserName();
+        } else if (isValid(processModel.getProcessResourceSchedule().getOverrideLoginUserName())) {
+            return processModel.getProcessResourceSchedule().getOverrideLoginUserName();
+        } else {
+            return gatewayComputeResourcePreference.getLoginUserName();
+        }
+    }
+
+    public String getStorageResourceLoginUserName(){
+        return gatewayStorageResourcePreference.getLoginUserName();
+    }
+
+    public String getStorageFileSystemRootLocation(){
+        return gatewayStorageResourcePreference.getFileSystemRootLocation();
+    }
+
+    public String getStorageResourceId() {
+        return gatewayStorageResourcePreference.getStorageResourceId();
+    }
+
+    private ComputationalResourceSchedulingModel getProcessCRSchedule() {
+        if (getProcessModel() != null) {
+            return getProcessModel().getProcessResourceSchedule();
+        } else {
+            return null;
+        }
+    }
+
+    private boolean isValid(String str) {
+        return str != null && !str.trim().isEmpty();
+    }
+
+    public String getUsageReportingGatewayId() {
+        return gatewayComputeResourcePreference.getUsageReportingGatewayId();
+    }
+
+    public String getAllocationProjectNumber() {
+        return gatewayComputeResourcePreference.getAllocationProjectNumber();
+    }
+
+    public String getReservation() {
+        long start = 0, end = 0;
+        String reservation = null;
+        if (isUseUserCRPref() &&
+                userComputeResourcePreference != null &&
+                isValid(userComputeResourcePreference.getReservation())) {
+            reservation = userComputeResourcePreference.getReservation();
+            start = userComputeResourcePreference.getReservationStartTime();
+            end = userComputeResourcePreference.getReservationEndTime();
+        } else {
+            reservation = gatewayComputeResourcePreference.getReservation();
+            start = gatewayComputeResourcePreference.getReservationStartTime();
+            end = gatewayComputeResourcePreference.getReservationEndTime();
+        }
+        if (reservation != null && start > 0 && start < end) {
+            long now = Calendar.getInstance().getTimeInMillis();
+            if (now > start && now < end) {
+                return reservation;
+            }
+        }
+        return null;
+    }
+
+    public String getQualityOfService() {
+        if (isUseUserCRPref() &&
+                userComputeResourcePreference != null &&
+                isValid(userComputeResourcePreference.getQualityOfService())) {
+            return userComputeResourcePreference.getQualityOfService();
+        } else {
+            return gatewayComputeResourcePreference.getQualityOfService();
+        }
+    }
+
+
+    public String getQueueName() {
+        if (isUseUserCRPref() &&
+                userComputeResourcePreference != null &&
+                isValid(userComputeResourcePreference.getPreferredBatchQueue())) {
+            return userComputeResourcePreference.getPreferredBatchQueue();
+        } else if (isValid(processModel.getProcessResourceSchedule().getQueueName())) {
+            return processModel.getProcessResourceSchedule().getQueueName();
+        } else {
+            return gatewayComputeResourcePreference.getPreferredBatchQueue();
+        }
+    }
+
+    public List<String> getQueueSpecificMacros() {
+        String queueName = getProcessCRSchedule().getQueueName();
+        Optional<BatchQueue> queue = getComputeResourceDescription().getBatchQueues().stream()
+                .filter(x->x.getQueueName().equals(queueName)).findFirst();
+        if(queue.isPresent()){
+            if(queue.get().getQueueSpecificMacros() != null && !queue.get().getQueueSpecificMacros().equals("")){
+                return Arrays.asList(queue.get().getQueueSpecificMacros().split(","));
+            }
+        }
+        return null;
+    }
+
+    public JobSubmissionInterface getPreferredJobSubmissionInterface() throws AppCatalogException {
+        try {
+            JobSubmissionProtocol preferredJobSubmissionProtocol = getJobSubmissionProtocol();
+            ComputeResourceDescription resourceDescription = getComputeResourceDescription();
+            List<JobSubmissionInterface> jobSubmissionInterfaces = resourceDescription.getJobSubmissionInterfaces();
+            Map<JobSubmissionProtocol, List<JobSubmissionInterface>> orderedInterfaces = new HashMap<>();
+            List<JobSubmissionInterface> interfaces = new ArrayList<>();
+            if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()) {
+                for (JobSubmissionInterface submissionInterface : jobSubmissionInterfaces){
+
+                    if (preferredJobSubmissionProtocol != null){
+                        if (preferredJobSubmissionProtocol.toString().equals(submissionInterface.getJobSubmissionProtocol().toString())){
+                            if (orderedInterfaces.containsKey(submissionInterface.getJobSubmissionProtocol())){
+                                List<JobSubmissionInterface> interfaceList = orderedInterfaces.get(submissionInterface.getJobSubmissionProtocol());
+                                interfaceList.add(submissionInterface);
+                            }else {
+                                interfaces.add(submissionInterface);
+                                orderedInterfaces.put(submissionInterface.getJobSubmissionProtocol(), interfaces);
+                            }
+                        }
+                    }else {
+                        Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
+                            @Override
+                            public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
+                                return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
+                            }
+                        });
+                    }
+                }
+                // fall back to the full, priority-sorted list when no interface matched the preferred protocol
+                if (orderedInterfaces.containsKey(preferredJobSubmissionProtocol)) {
+                    interfaces = orderedInterfaces.get(preferredJobSubmissionProtocol);
+                } else {
+                    interfaces = jobSubmissionInterfaces;
+                }
+                Collections.sort(interfaces, new Comparator<JobSubmissionInterface>() {
+                    @Override
+                    public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
+                        return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
+                    }
+                });
+            } else {
+                throw new AppCatalogException("Compute resource should have at least one job submission interface defined...");
+            }
+            return interfaces.get(0);
+        } catch (AppCatalogException e) {
+            throw new AppCatalogException("Error occurred while retrieving data from app catalog", e);
+        }
+    }
+
+    public TaskModel getCurrentTaskModel() {
+        return getTaskMap().get(taskId);
+    }
+
+    public Object getSubTaskModel() throws TException {
+        if (subTaskModel == null) {
+            subTaskModel = ThriftUtils.getSubTaskModel(getCurrentTaskModel());
+        }
+        return subTaskModel;
+    }
+
+    public static class TaskContextBuilder {
+        private final String processId;
+        private final String gatewayId;
+        private final String taskId;
+        private ExperimentCatalog experimentCatalog;
+        private AppCatalog appCatalog;
+        private Publisher statusPublisher;
+        private GatewayResourceProfile gatewayResourceProfile;
+        private ComputeResourcePreference gatewayComputeResourcePreference;
+        private StoragePreference gatewayStorageResourcePreference;
+        private ProcessModel processModel;
+
+        public TaskContextBuilder(String processId, String gatewayId, String taskId) throws Exception {
+            if (notValid(processId) || notValid(gatewayId) || notValid(taskId)) {
+                throwError("Process Id, Gateway Id and Task Id must not be null");
+            }
+            this.processId = processId;
+            this.gatewayId = gatewayId;
+            this.taskId = taskId;
+        }
+
+        public TaskContextBuilder setGatewayResourceProfile(GatewayResourceProfile gatewayResourceProfile) {
+            this.gatewayResourceProfile = gatewayResourceProfile;
+            return this;
+        }
+
+        public TaskContextBuilder setGatewayComputeResourcePreference(ComputeResourcePreference gatewayComputeResourcePreference) {
+            this.gatewayComputeResourcePreference = gatewayComputeResourcePreference;
+            return this;
+        }
+
+        public TaskContextBuilder setGatewayStorageResourcePreference(StoragePreference gatewayStorageResourcePreference) {
+            this.gatewayStorageResourcePreference = gatewayStorageResourcePreference;
+            return this;
+        }
+
+        public TaskContextBuilder setProcessModel(ProcessModel processModel) {
+            this.processModel = processModel;
+            return this;
+        }
+
+        public TaskContextBuilder setExperimentCatalog(ExperimentCatalog experimentCatalog) {
+            this.experimentCatalog = experimentCatalog;
+            return this;
+        }
+
+        public TaskContextBuilder setAppCatalog(AppCatalog appCatalog) {
+            this.appCatalog = appCatalog;
+            return this;
+        }
+
+        public TaskContextBuilder setStatusPublisher(Publisher statusPublisher) {
+            this.statusPublisher = statusPublisher;
+            return this;
+        }
+
+        public TaskContext build() throws Exception {
+            if (notValid(gatewayResourceProfile)) {
+                throwError("Invalid GatewayResourceProfile");
+            }
+            if (notValid(gatewayComputeResourcePreference)) {
+                throwError("Invalid Gateway ComputeResourcePreference");
+            }
+            if (notValid(gatewayStorageResourcePreference)) {
+                throwError("Invalid Gateway StoragePreference");
+            }
+            if (notValid(processModel)) {
+                throwError("Invalid Process Model");
+            }
+            if (notValid(appCatalog)) {
+                throwError("Invalid AppCatalog");
+            }
+            if (notValid(experimentCatalog)) {
+                throwError("Invalid Experiment catalog");
+            }
+            //if (notValid(statusPublisher)) {
+              //  throwError("Invalid Status Publisher");
+            //}
+
+            TaskContext ctx = new TaskContext(processId, gatewayId, taskId);
+            ctx.setAppCatalog(appCatalog);
+            ctx.setExperimentCatalog(experimentCatalog);
+            ctx.setStatusPublisher(statusPublisher);
+            ctx.setProcessModel(processModel);
+            ctx.setGatewayResourceProfile(gatewayResourceProfile);
+            ctx.setGatewayComputeResourcePreference(gatewayComputeResourcePreference);
+            ctx.setGatewayStorageResourcePreference(gatewayStorageResourcePreference);
+
+            return ctx;
+        }
+
+        private boolean notValid(Object value) {
+            return value == null;
+        }
+
+        private void throwError(String msg) throws Exception {
+            throw new Exception(msg);
+        }
+
+    }
+}
+
+
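Note: the snippet below is a minimal usage sketch for the TaskContextBuilder above, not part of the patch. The registry handles, process model and gateway preference objects are assumed to be looked up elsewhere, and the import paths follow the usual Airavata model/registry packages (verify against the module's own imports).

    import org.apache.airavata.helix.impl.task.TaskContext;
    import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
    import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
    import org.apache.airavata.model.appcatalog.gatewayprofile.StoragePreference;
    import org.apache.airavata.model.process.ProcessModel;
    import org.apache.airavata.registry.cpi.AppCatalog;
    import org.apache.airavata.registry.cpi.ExperimentCatalog;

    public class TaskContextUsageSketch {

        // Builds a TaskContext for one task run; build() throws if a mandatory part is missing.
        static TaskContext newContext(String processId, String gatewayId, String taskId,
                                      AppCatalog appCatalog, ExperimentCatalog experimentCatalog,
                                      ProcessModel processModel, GatewayResourceProfile gatewayProfile,
                                      ComputeResourcePreference computePref, StoragePreference storagePref)
                throws Exception {
            return new TaskContext.TaskContextBuilder(processId, gatewayId, taskId)
                    .setAppCatalog(appCatalog)
                    .setExperimentCatalog(experimentCatalog)
                    .setProcessModel(processModel)
                    .setGatewayResourceProfile(gatewayProfile)
                    .setGatewayComputeResourcePreference(computePref)
                    .setGatewayStorageResourcePreference(storagePref)
                    .build();   // the statusPublisher check is commented out in build() above
        }
    }
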
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
new file mode 100644
index 0000000..0b92922
--- /dev/null
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
@@ -0,0 +1,335 @@
+package org.apache.airavata.helix.impl.task.submission;
+
+import groovy.text.GStringTemplateEngine;
+import groovy.text.TemplateEngine;
+import org.apache.airavata.helix.impl.task.TaskContext;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.appdeployment.CommandObject;
+import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
+import org.apache.airavata.model.application.io.DataType;
+import org.apache.airavata.model.application.io.InputDataObjectType;
+import org.apache.airavata.model.application.io.OutputDataObjectType;
+import org.apache.airavata.model.parallelism.ApplicationParallelismType;
+import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel;
+import org.apache.airavata.model.task.JobSubmissionTaskModel;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+import org.apache.thrift.TException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+public class GroovyMapBuilder {
+
+    private static final Logger logger = LogManager.getLogger(GroovyMapBuilder.class);
+
+    public static final String MULTIPLE_INPUTS_SPLITTER = ",";
+
+    private TaskContext taskContext;
+
+    public GroovyMapBuilder(TaskContext taskContext) {
+        this.taskContext = taskContext;
+    }
+
+    public GroovyMapData build() throws Exception {
+        GroovyMapData mapData = new GroovyMapData();
+        mapData.setInputDir(taskContext.getInputDir());
+        mapData.setOutputDir(taskContext.getOutputDir());
+        mapData.setExecutablePath(taskContext.getApplicationDeploymentDescription().getExecutablePath());
+        mapData.setStdoutFile(taskContext.getStdoutLocation());
+        mapData.setStderrFile(taskContext.getStderrLocation());
+        mapData.setScratchLocation(taskContext.getScratchLocation());
+        mapData.setGatewayId(taskContext.getGatewayId());
+        mapData.setGatewayUserName(taskContext.getProcessModel().getUserName());
+        mapData.setApplicationName(taskContext.getApplicationInterfaceDescription().getApplicationName());
+        mapData.setQueueSpecificMacros(taskContext.getQueueSpecificMacros());
+        mapData.setAccountString(taskContext.getAllocationProjectNumber());
+        mapData.setReservation(taskContext.getReservation());
+        mapData.setJobName("A" + String.valueOf(generateJobName()));
+
+        List<String> inputValues = getProcessInputValues(taskContext.getProcessModel().getProcessInputs(), true);
+        inputValues.addAll(getProcessOutputValues(taskContext.getProcessModel().getProcessOutputs(), true));
+        mapData.setInputs(inputValues);
+
+        List<String> inputValuesAll = getProcessInputValues(taskContext.getProcessModel().getProcessInputs(), false);
+        inputValuesAll.addAll(getProcessOutputValues(taskContext.getProcessModel().getProcessOutputs(), false));
+        mapData.setInputsAll(inputValuesAll);
+
+        //mapData.setUserName(taskContext.geJo)
+
+        mapData.setShellName("/bin/bash");
+
+        if (taskContext != null) {
+            try {
+                JobSubmissionTaskModel jobSubmissionTaskModel = ((JobSubmissionTaskModel) taskContext.getSubTaskModel());
+                if (jobSubmissionTaskModel.getWallTime() > 0) {
+                    mapData.setMaxWallTime(maxWallTimeCalculator(jobSubmissionTaskModel.getWallTime()));
+                    // TODO fix this
+                    /*if (resourceJobManager != null) {
+                        if (resourceJobManager.getResourceJobManagerType().equals(ResourceJobManagerType.LSF)) {
+                            groovyMap.add(Script.MAX_WALL_TIME,
+                                    GFacUtils.maxWallTimeCalculatorForLSF(jobSubmissionTaskModel.getWallTime()));
+                        }
+                    }*/
+                }
+            } catch (TException e) {
+                logger.error("Error while getting job submission sub task model", e);
+            }
+        }
+
+        // NOTE: Give precedence to data that comes with the experiment
+        // qos per queue
+        String qoS = getQoS(taskContext.getQualityOfService(), taskContext.getQueueName());
+        if (qoS != null) {
+            mapData.setQualityOfService(qoS);
+        }
+        ComputationalResourceSchedulingModel scheduling = taskContext.getProcessModel().getProcessResourceSchedule();
+        if (scheduling != null) {
+            int totalNodeCount = scheduling.getNodeCount();
+            int totalCPUCount = scheduling.getTotalCPUCount();
+
+            if (isValid(scheduling.getQueueName())) {
+                mapData.setQueueName(scheduling.getQueueName());
+            }
+            if (totalNodeCount > 0) {
+                mapData.setNodes(totalNodeCount);
+            }
+            if (totalCPUCount > 0) {
+                int ppn = totalCPUCount / totalNodeCount;
+                mapData.setProcessPerNode(ppn);
+                mapData.setCpuCount(totalCPUCount);
+            }
+            // Max wall time may already be set at this point if the job submission task has a wall time
+            // configured for this job; if so, ignore the scheduling configuration.
+            if (scheduling.getWallTimeLimit() > 0 && mapData.getMaxWallTime() == null) {
+                mapData.setMaxWallTime(maxWallTimeCalculator(scheduling.getWallTimeLimit()));
+
+                // TODO fix this
+                /*
+                if (resourceJobManager != null) {
+                    if (resourceJobManager.getResourceJobManagerType().equals(ResourceJobManagerType.LSF)) {
+                        mapData.setMaxWallTime(maxWallTimeCalculatorForLSF(scheduling.getWallTimeLimit()));
+                    }
+                }
+                */
+            }
+            if (scheduling.getTotalPhysicalMemory() > 0) {
+                mapData.setUsedMem(scheduling.getTotalPhysicalMemory());
+            }
+            if (isValid(scheduling.getOverrideLoginUserName())) {
+                mapData.setUserName(scheduling.getOverrideLoginUserName());
+            }
+            if (isValid(scheduling.getOverrideAllocationProjectNumber())) {
+                mapData.setAccountString(scheduling.getOverrideAllocationProjectNumber());
+            }
+            if (isValid(scheduling.getStaticWorkingDir())) {
+                mapData.setWorkingDirectory(scheduling.getStaticWorkingDir());
+            }
+        } else {
+            logger.error("Task scheduling cannot be null at this point");
+        }
+
+        ApplicationDeploymentDescription appDepDescription = taskContext.getApplicationDeploymentDescription();
+
+        List<SetEnvPaths> exportCommands = appDepDescription.getSetEnvironment();
+        if (exportCommands != null) {
+            List<String> exportCommandList = exportCommands.stream()
+                    .sorted((e1, e2) -> e1.getEnvPathOrder() - e2.getEnvPathOrder())
+                    .map(map -> map.getName() + "=" + map.getValue())
+                    .collect(Collectors.toList());
+            mapData.setExports(exportCommandList);
+        }
+
+        List<CommandObject> moduleCmds = appDepDescription.getModuleLoadCmds();
+        if (moduleCmds != null) {
+            List<String> modulesCmdCollect = moduleCmds.stream()
+                    .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
+                    .map(map -> map.getCommand())
+                    .collect(Collectors.toList());
+            mapData.setModuleCommands(modulesCmdCollect);
+        }
+
+        List<CommandObject> preJobCommands = appDepDescription.getPreJobCommands();
+        if (preJobCommands != null) {
+            List<String> preJobCmdCollect = preJobCommands.stream()
+                    .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
+                    .map(map -> parseCommands(map.getCommand(), mapData))
+                    .collect(Collectors.toList());
+            mapData.setPreJobCommands(preJobCmdCollect);
+        }
+
+        List<CommandObject> postJobCommands = appDepDescription.getPostJobCommands();
+        if (postJobCommands != null) {
+            List<String> postJobCmdCollect = postJobCommands.stream()
+                    .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
+                    .map(map -> parseCommands(map.getCommand(), mapData))
+                    .collect(Collectors.toList());
+            mapData.setPostJobCommands(postJobCmdCollect);
+        }
+
+        ApplicationParallelismType parallelism = appDepDescription.getParallelism();
+        if (parallelism != null) {
+            if (parallelism != ApplicationParallelismType.SERIAL) {
+                Map<ApplicationParallelismType, String> parallelismPrefix = taskContext.getResourceJobManager().getParallelismPrefix();
+                if (parallelismPrefix != null){
+                    String parallelismCommand = parallelismPrefix.get(parallelism);
+                    if (parallelismCommand != null){
+                        mapData.setJobSubmitterCommand(parallelismCommand);
+                    }else {
+                        throw new Exception("Parallelism prefix is not defined for parallelism type " + parallelism + ". Please define the parallelism prefix in the App Catalog");
+                    }
+                }
+            }
+        }
+
+        return mapData;
+    }
+
+    public static int generateJobName() {
+        Random random = new Random();
+        int i = random.nextInt(Integer.MAX_VALUE);
+        i = i + 99999999;
+        if (i < 0) {
+            i = i * (-1);
+        }
+        return i;
+    }
+
+    private static List<String> getProcessInputValues(List<InputDataObjectType> processInputs, boolean commandLineOnly) {
+        List<String> inputValues = new ArrayList<String>();
+        if (processInputs != null) {
+
+            // sort the inputs first and then build the command list
+            Comparator<InputDataObjectType> inputOrderComparator = new Comparator<InputDataObjectType>() {
+                @Override
+                public int compare(InputDataObjectType inputDataObjectType, InputDataObjectType t1) {
+                    return inputDataObjectType.getInputOrder() - t1.getInputOrder();
+                }
+            };
+            Set<InputDataObjectType> sortedInputSet = new TreeSet<InputDataObjectType>(inputOrderComparator);
+            for (InputDataObjectType input : processInputs) {
+                sortedInputSet.add(input);
+            }
+            for (InputDataObjectType inputDataObjectType : sortedInputSet) {
+                if (commandLineOnly && !inputDataObjectType.isRequiredToAddedToCommandLine()) {
+                    continue;
+                }
+                if (inputDataObjectType.getApplicationArgument() != null
+                        && !inputDataObjectType.getApplicationArgument().equals("")) {
+                    inputValues.add(inputDataObjectType.getApplicationArgument());
+                }
+
+                if (inputDataObjectType.getValue() != null
+                        && !inputDataObjectType.getValue().equals("")) {
+                    if (inputDataObjectType.getType() == DataType.URI) {
+                        // set only the relative path
+                        String filePath = inputDataObjectType.getValue();
+                        filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
+                        inputValues.add(filePath);
+                    } else if (inputDataObjectType.getType() == DataType.URI_COLLECTION) {
+                        String filePaths = inputDataObjectType.getValue();
+                        String[] paths = filePaths.split(MULTIPLE_INPUTS_SPLITTER);
+                        String filePath;
+                        String inputs = "";
+                        int i = 0;
+                        for (; i < paths.length - 1; i++) {
+                            filePath = paths[i];
+                            filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
+                            // File names separated by a space
+                            inputs += filePath + " ";
+                        }
+                        inputs += paths[i];
+                        inputValues.add(inputs);
+                    } else {
+                        inputValues.add(inputDataObjectType.getValue());
+                    }
+
+                }
+            }
+        }
+        return inputValues;
+    }
+
+    private static List<String> getProcessOutputValues(List<OutputDataObjectType> processOutputs, boolean commandLineOnly) {
+        List<String> inputValues = new ArrayList<>();
+        if (processOutputs != null) {
+            for (OutputDataObjectType output : processOutputs) {
+                if (output.getApplicationArgument() != null
+                        && !output.getApplicationArgument().equals("")) {
+                    inputValues.add(output.getApplicationArgument());
+                }
+                if(commandLineOnly){
+                    if (output.getValue() != null && !output.getValue().equals("") && output.isRequiredToAddedToCommandLine()) {
+                        if (output.getType() == DataType.URI) {
+                            String filePath = output.getValue();
+                            filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
+                            inputValues.add(filePath);
+                        }
+                    }
+                }else{
+                    if (output.getValue() != null && !output.getValue().equals("")) {
+                        if (output.getType() == DataType.URI) {
+                            String filePath = output.getValue();
+                            filePath = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
+                            inputValues.add(filePath);
+                        }
+                    }
+                }
+
+            }
+        }
+        return inputValues;
+    }
+
+    static String getQoS(String qualityOfService, String preferredBatchQueue) {
+        if(preferredBatchQueue == null  || preferredBatchQueue.isEmpty()
+                ||  qualityOfService == null  || qualityOfService.isEmpty()) return null;
+        final String qos = "qos";
+        Pattern pattern = Pattern.compile(preferredBatchQueue + "=(?<" + qos + ">[^,]*)");
+        Matcher matcher = pattern.matcher(qualityOfService);
+        if (matcher.find()) {
+            return matcher.group(qos);
+        }
+        return null;
+    }
+
+    public static String maxWallTimeCalculator(int maxWalltime) {
+        if (maxWalltime < 60) {
+            return "00:" + maxWalltime + ":00";
+        } else {
+            int minutes = maxWalltime % 60;
+            int hours = maxWalltime / 60;
+            return hours + ":" + minutes + ":00";
+        }
+    }
+
+    public static String maxWallTimeCalculatorForLSF(int maxWalltime) {
+        if (maxWalltime < 60) {
+            return "00:" + maxWalltime;
+        } else {
+            int minutes = maxWalltime % 60;
+            int hours = maxWalltime / 60;
+            return hours + ":" + minutes;
+        }
+    }
+
+    private static boolean isValid(String str) {
+        return str != null && !str.isEmpty();
+    }
+
+    static String parseCommands(String value, GroovyMapData bindMap) {
+        TemplateEngine templateEngine = new GStringTemplateEngine();
+        try {
+            return templateEngine.createTemplate(value).make(bindMap.toImmutableMap()).toString();
+        } catch (ClassNotFoundException | IOException e) {
+            throw new IllegalArgumentException("Error while parsing command " + value
+                    + ", invalid command or incomplete bind map", e);
+        }
+    }
+
+}
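For reference, a rough sanity check for the static helpers above (not part of the patch). The quality-of-service value is assumed to be the comma-separated "queue=qos" string that the getQoS() regex expects, and the class is placed in the same package so the package-private getQoS() is visible.

    package org.apache.airavata.helix.impl.task.submission;

    public class GroovyMapBuilderSanityCheck {
        public static void main(String[] args) {
            String qosSpec = "shared=oneweek,compute=normal";                // made-up example value
            System.out.println(GroovyMapBuilder.getQoS(qosSpec, "compute")); // normal
            System.out.println(GroovyMapBuilder.getQoS(qosSpec, "gpu"));     // null (no entry for that queue)
            System.out.println(GroovyMapBuilder.maxWallTimeCalculator(30));  // 00:30:00
            System.out.println(GroovyMapBuilder.maxWallTimeCalculator(90));  // 1:30:00
        }
    }
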
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
index ec75fb7..995f772 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
@@ -1,6 +1,14 @@
 package org.apache.airavata.helix.impl.task.submission;
 
+import com.google.common.collect.ImmutableMap;
+import groovy.lang.Writable;
+import groovy.text.GStringTemplateEngine;
+import groovy.text.TemplateEngine;
+import org.apache.airavata.common.utils.ApplicationSettings;
+
+import java.io.File;
 import java.lang.reflect.Field;
+import java.net.URL;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -35,7 +43,7 @@ public class GroovyMapData {
     private String applicationName;
 
     @ScriptTag(name = "queueSpecificMacros")
-    private String queueSpecificMacros;
+    private List<String> queueSpecificMacros;
 
     @ScriptTag(name = "accountString")
     private String accountString;
@@ -206,13 +214,12 @@ public class GroovyMapData {
         return this;
     }
 
-    public String getQueueSpecificMacros() {
+    public List<String> getQueueSpecificMacros() {
         return queueSpecificMacros;
     }
 
-    public GroovyMapData setQueueSpecificMacros(String queueSpecificMacros) {
+    public void setQueueSpecificMacros(List<String> queueSpecificMacros) {
         this.queueSpecificMacros = queueSpecificMacros;
-        return this;
     }
 
     public String getAccountString() {
@@ -412,4 +419,40 @@ public class GroovyMapData {
         this.chassisName = chassisName;
         return this;
     }
+
+    public Map toImmutableMap() {
+
+        Map<String, Object> dataMap = new HashMap<>();
+        Field[] declaredFields = this.getClass().getDeclaredFields();
+        for (Field field : declaredFields) {
+            field.setAccessible(true);
+            if (field.getAnnotation(ScriptTag.class) != null) {
+                try {
+                    dataMap.put(field.getAnnotation(ScriptTag.class).name(), field.get(this));
+                } catch (IllegalAccessException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return dataMap;
+    }
+
+    public String getAsString(String templateName) throws Exception {
+        URL templateUrl = ApplicationSettings.loadFile(templateName);
+        if (templateUrl == null) {
+            String error = "Template file '" + templateName + "' not found";
+            throw new Exception(error);
+        }
+        File template = new File(templateUrl.getPath());
+        TemplateEngine engine = new GStringTemplateEngine();
+        Writable make;
+        try {
+
+            make = engine.createTemplate(template).make(toImmutableMap());
+        } catch (Exception e) {
+            throw new Exception("Error while generating script using groovy map", e);
+        }
+        return make.toString();
+    }
 }
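A standalone sketch of the template binding step that toImmutableMap()/getAsString() rely on (not part of the patch): a ${...} placeholder in the Groovy job template resolves against the keys collected from the @ScriptTag annotations. The placeholder names and values below are illustrative only.

    import groovy.text.GStringTemplateEngine;

    import java.util.HashMap;
    import java.util.Map;

    public class TemplateBindingSketch {
        public static void main(String[] args) throws Exception {
            Map<String, Object> binding = new HashMap<>();
            binding.put("jobName", "A123456789");      // sample values, not read from a real process
            binding.put("queueName", "compute");

            String rendered = new GStringTemplateEngine()
                    .createTemplate("#SBATCH -J ${jobName}\n#SBATCH -p ${queueName}")
                    .make(binding)
                    .toString();
            System.out.println(rendered);
        }
    }
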
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
index fb9917f..fab4747 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.task.submission.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
 import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
@@ -36,25 +37,24 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
     @Override
     public TaskResult onRun(TaskHelper taskHelper) {
         try {
-            GroovyMapData groovyMapData = new GroovyMapData();
 
+            GroovyMapData mapData = new GroovyMapBuilder(getTaskContext()).build();
 
             JobModel jobModel = new JobModel();
             jobModel.setProcessId(getProcessId());
-            jobModel.setWorkingDir(groovyMapData.getWorkingDirectory());
+            jobModel.setWorkingDir(mapData.getWorkingDirectory());
             jobModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
             jobModel.setTaskId(getTaskId());
-            jobModel.setJobName(groovyMapData.getJobName());
+            jobModel.setJobName(mapData.getJobName());
 
-            File jobFile = SubmissionUtil.createJobFile(groovyMapData);
+            if (mapData != null) {
+                //jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
+                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                        getTaskContext().getComputeResourceId(),
+                        getTaskContext().getJobSubmissionProtocol().name(),
+                        getTaskContext().getComputeResourceCredentialToken());
 
-
-            if (jobFile != null && jobFile.exists()) {
-                jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
-                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(getComputeResourceId(),
-                        getJobSubmissionProtocol().name(), getComputeResourceCredentialToken());
-
-                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, jobFile, groovyMapData.getWorkingDirectory());
+                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, mapData.getWorkingDirectory());
 
                 jobModel.setExitCode(submissionOutput.getExitCode());
                 jobModel.setStdErr(submissionOutput.getStdErr());
@@ -137,7 +137,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                         String loadCommand = getComputeResourceDescription().getGatewayUsageModuleLoadCommand();
                         String usageExecutable = getComputeResourceDescription().getGatewayUsageExecutable();
                         ExperimentModel experiment = (ExperimentModel)getExperimentCatalog().get(ExperimentCatalogModelType.EXPERIMENT, getExperimentId());
-                        String username = experiment.getUserName() + "@" + getGatewayComputeResourcePreference().getUsageReportingGatewayId();
+                        String username = experiment.getUserName() + "@" + getTaskContext().getGatewayComputeResourcePreference().getUsageReportingGatewayId();
                         RawCommandInfo rawCommandInfo = new RawCommandInfo(loadCommand + " && " + usageExecutable + " -gateway_user " +  username  +
                                 " -submit_time \"`date '+%F %T %:z'`\"  -jobid " + jobId );
                         adaptor.executeCommand(rawCommandInfo.getRawCommand(), null);
@@ -150,7 +150,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                 } else {
                     int verificationTryCount = 0;
                     while (verificationTryCount++ < 3) {
-                        String verifyJobId = verifyJobSubmission(adaptor, jobModel.getJobName(), getComputeResourceLoginUserName());
+                        String verifyJobId = verifyJobSubmission(adaptor, jobModel.getJobName(), getTaskContext().getComputeResourceLoginUserName());
                         if (verifyJobId != null && !verifyJobId.isEmpty()) {
                             // JobStatus either changed from SUBMITTED to QUEUED or directly to QUEUED
                             jobId = verifyJobId;
@@ -194,17 +194,12 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                 }
 
             }  else {
+                return onFail("Job data is null", true, null);
+                //  taskStatus.setReason("JobFile is null");
                 //taskStatus.setState(TaskState.FAILED);
-                if (jobFile == null) {
-                    return onFail("Job file is null", true, null);
-                  //  taskStatus.setReason("JobFile is null");
-                } else {
-                    //taskStatus.setReason("Job file doesn't exist");
-                    return onFail("Job file doesn't exist", true, null);
-                }
             }
         } catch (Exception e) {
-            return onFail("Task failed due to unexpected issue", false, null);
+            return onFail("Task failed due to unexpected issue", false, e);
         }
         // TODO get rid of this
         return onFail("Task moved to an unknown state", false, null);
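The gateway usage reporting branch above only concatenates a shell one-liner; the sketch below shows the shape of the resulting command (module name, executable path, user and job id are placeholders, not values from the patch).

    public class UsageReportingCommandSketch {
        public static void main(String[] args) {
            String loadCommand = "module load gateway-usage-reporting";          // hypothetical module load command
            String usageExecutable = "/opt/gateway/gateway_submit_attributes";   // hypothetical executable path
            String username = "testuser@example-gateway";
            String jobId = "1234567";

            // mirrors the RawCommandInfo built in DefaultJobSubmissionTask
            String rawCommand = loadCommand + " && " + usageExecutable + " -gateway_user " + username
                    + " -submit_time \"`date '+%F %T %:z'`\"  -jobid " + jobId;
            System.out.println(rawCommand);
        }
    }
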
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
index da04365..58b70ef 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.task.submission.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
 import org.apache.airavata.helix.task.api.TaskHelper;
@@ -23,23 +24,23 @@ public class ForkJobSubmissionTask extends JobSubmissionTask {
     public TaskResult onRun(TaskHelper taskHelper) {
 
         try {
-            GroovyMapData groovyMapData = new GroovyMapData();
+            GroovyMapData mapData = new GroovyMapBuilder(getTaskContext()).build();
 
             JobModel jobModel = new JobModel();
             jobModel.setProcessId(getProcessId());
-            jobModel.setWorkingDir(groovyMapData.getWorkingDirectory());
+            jobModel.setWorkingDir(mapData.getWorkingDirectory());
             jobModel.setCreationTime(AiravataUtils.getCurrentTimestamp().getTime());
             jobModel.setTaskId(getTaskId());
-            jobModel.setJobName(groovyMapData.getJobName());
+            jobModel.setJobName(mapData.getJobName());
 
-            File jobFile = SubmissionUtil.createJobFile(groovyMapData);
+            if (mapData != null) {
+                //jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
+                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                        getTaskContext().getComputeResourceId(),
+                        getTaskContext().getJobSubmissionProtocol().name(),
+                        getTaskContext().getComputeResourceCredentialToken());
 
-            if (jobFile != null && jobFile.exists()) {
-                jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
-                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(getComputeResourceId(),
-                        getJobSubmissionProtocol().name(), getComputeResourceCredentialToken());
-
-                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, jobFile, groovyMapData.getWorkingDirectory());
+                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, mapData.getWorkingDirectory());
 
                 jobModel.setExitCode(submissionOutput.getExitCode());
                 jobModel.setStdErr(submissionOutput.getStdErr());
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
index fe5a3dc..11e59eb 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
@@ -4,11 +4,14 @@ import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.CommandOutput;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.helix.impl.task.AiravataTask;
+import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.config.JobFactory;
 import org.apache.airavata.helix.impl.task.submission.config.JobManagerConfiguration;
 import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
 import org.apache.airavata.messaging.core.MessageContext;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
@@ -23,9 +26,11 @@ import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
 import org.apache.airavata.model.messaging.event.MessageType;
 import org.apache.airavata.model.status.JobStatus;
 import org.apache.airavata.registry.cpi.*;
+import org.apache.commons.io.FileUtils;
 import org.apache.helix.HelixManager;
 
 import java.io.File;
+import java.security.SecureRandom;
 import java.util.*;
 
 public abstract class JobSubmissionTask extends AiravataTask {
@@ -38,10 +43,19 @@ public abstract class JobSubmissionTask extends AiravataTask {
     }
 
     //////////////////////
-    protected JobSubmissionOutput submitBatchJob(AgentAdaptor agentAdaptor, File jobFile, String workingDirectory) throws Exception {
+    protected JobSubmissionOutput submitBatchJob(AgentAdaptor agentAdaptor, GroovyMapData groovyMapData, String workingDirectory) throws Exception {
         JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
-                getAppCatalog(), getJobSubmissionProtocol(), getPreferredJobSubmissionInterface()));
-        RawCommandInfo submitCommand = jobManagerConfiguration.getSubmitCommand(workingDirectory, jobFile.getPath());
+                getAppCatalog(), getTaskContext().getJobSubmissionProtocol(), getTaskContext().getPreferredJobSubmissionInterface()));
+
+        String scriptAsString = groovyMapData.getAsString(jobManagerConfiguration.getJobDescriptionTemplateName());
+
+        int number = new SecureRandom().nextInt();
+        number = (number < 0 ? -number : number);
+        File tempJobFile = new File(getLocalDataDir(), "job_" + Integer.toString(number) + jobManagerConfiguration.getScriptExtension());
+        FileUtils.writeStringToFile(tempJobFile, scriptAsString);
+
+        // TODO transfer file
+        RawCommandInfo submitCommand = jobManagerConfiguration.getSubmitCommand(workingDirectory, tempJobFile.getPath());
         CommandOutput commandOutput = agentAdaptor.executeCommand(submitCommand.getRawCommand(), workingDirectory);
 
         JobSubmissionOutput jsoutput = new JobSubmissionOutput();
@@ -63,12 +77,17 @@ public abstract class JobSubmissionTask extends AiravataTask {
         jsoutput.setStdOut(commandOutput.getStdOut());
         jsoutput.setStdErr(commandOutput.getStdError());
         return jsoutput;
+    }
 
+    public File getLocalDataDir() {
+        String outputPath = ServerSettings.getLocalDataLocation();
+        outputPath = (outputPath.endsWith(File.separator) ? outputPath : outputPath + File.separator);
+        return new File(outputPath + getProcessId());
     }
 
     public JobStatus getJobStatus(AgentAdaptor agentAdaptor, String jobID) throws Exception {
         JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
-                getAppCatalog(), getJobSubmissionProtocol(), getPreferredJobSubmissionInterface()));
+                getAppCatalog(), getTaskContext().getJobSubmissionProtocol(), getTaskContext().getPreferredJobSubmissionInterface()));
         CommandOutput commandOutput = agentAdaptor.executeCommand(jobManagerConfiguration.getMonitorCommand(jobID).getRawCommand(), null);
 
         return jobManagerConfiguration.getParser().parseJobStatus(jobID, commandOutput.getStdOut());
@@ -77,7 +96,7 @@ public abstract class JobSubmissionTask extends AiravataTask {
 
     public String getJobIdByJobName(AgentAdaptor agentAdaptor, String jobName, String userName) throws Exception {
         JobManagerConfiguration jobManagerConfiguration = JobFactory.getJobManagerConfiguration(JobFactory.getResourceJobManager(
-                getAppCatalog(), getJobSubmissionProtocol(), getPreferredJobSubmissionInterface()));
+                getAppCatalog(), getTaskContext().getJobSubmissionProtocol(), getTaskContext().getPreferredJobSubmissionInterface()));
 
         RawCommandInfo jobIdMonitorCommand = jobManagerConfiguration.getJobIdMonitorCommand(jobName, userName);
         CommandOutput commandOutput = agentAdaptor.executeCommand(jobIdMonitorCommand.getRawCommand(), null);
@@ -159,44 +178,4 @@ public abstract class JobSubmissionTask extends AiravataTask {
 
     ///////////// required for groovy map
 
-    private String workingDir;
-    private String scratchLocation;
-    private UserComputeResourcePreference userComputeResourcePreference;
-
-    public String getWorkingDir() {
-        if (workingDir == null) {
-            if (getProcessModel().getProcessResourceSchedule().getStaticWorkingDir() != null){
-                workingDir = getProcessModel().getProcessResourceSchedule().getStaticWorkingDir();
-            }else {
-                String scratchLocation = getScratchLocation();
-                workingDir = (scratchLocation.endsWith("/") ? scratchLocation + getProcessId() : scratchLocation + "/" +
-                        getProcessId());
-            }
-        }
-        return workingDir;
-    }
-
-    public String getScratchLocation() {
-        if (scratchLocation == null) {
-            if (isUseUserCRPref() &&
-                    userComputeResourcePreference != null &&
-                    isValid(userComputeResourcePreference.getScratchLocation())) {
-                scratchLocation = userComputeResourcePreference.getScratchLocation();
-            } else if (isValid(processModel.getProcessResourceSchedule().getOverrideScratchLocation())) {
-                scratchLocation = processModel.getProcessResourceSchedule().getOverrideScratchLocation();
-            }else {
-                scratchLocation = gatewayComputeResourcePreference.getScratchLocation();
-            }
-        }
-        return scratchLocation;
-    }
-
-    protected UserComputeResourcePreference userComputeResourcePreference() throws AppCatalogException {
-        UserComputeResourcePreference userComputeResourcePreference =
-                getAppCatalog().getUserResourceProfile().getUserComputeResourcePreference(
-                        getProcessModel().getUserName(),
-                        getGatewayId(),
-                        getProcessModel().getComputeResourceId());
-    }
-
 }
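A minimal sketch of how submitBatchJob() above lays out the locally rendered job script before submission (not part of the patch). The local data location, process id and script extension are stand-ins for ServerSettings.getLocalDataLocation(), getProcessId() and the job manager configuration.

    import java.io.File;
    import java.security.SecureRandom;

    public class LocalJobScriptPathSketch {
        public static void main(String[] args) {
            String localDataLocation = "/tmp/airavata-local-data";   // stand-in for ServerSettings.getLocalDataLocation()
            String processId = "PROCESS_438a87cc";                   // stand-in for getProcessId()
            String scriptExtension = ".slurm";                       // stand-in for jobManagerConfiguration.getScriptExtension()

            localDataLocation = localDataLocation.endsWith(File.separator)
                    ? localDataLocation : localDataLocation + File.separator;
            File localDataDir = new File(localDataLocation + processId);

            int number = new SecureRandom().nextInt();
            number = (number < 0 ? -number : number);
            File tempJobFile = new File(localDataDir, "job_" + number + scriptExtension);
            System.out.println(tempJobFile.getPath());   // e.g. /tmp/airavata-local-data/PROCESS_438a87cc/job_123456789.slurm
        }
    }
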
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
index 5a3ca31..67ad0db 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.task.submission.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
 import org.apache.airavata.helix.impl.task.submission.task.JobSubmissionTask;
@@ -41,10 +42,13 @@ public class LocalJobSubmissionTask extends JobSubmissionTask {
                 jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
                 saveJobModel(jobModel);
 
-                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(getComputeResourceId(),
-                        getJobSubmissionProtocol().name(), getComputeResourceCredentialToken());
+                AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                        getTaskContext().getComputeResourceId(),
+                        getTaskContext().getJobSubmissionProtocol().name(),
+                        getTaskContext().getComputeResourceCredentialToken());
 
-                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, jobFile, groovyMapData.getWorkingDirectory());
+                GroovyMapData mapData = new GroovyMapBuilder(getTaskContext()).build();
+                JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, groovyMapData.getWorkingDirectory());
 
                 JobStatus jobStatus = new JobStatus();
                 jobStatus.setJobState(JobState.SUBMITTED);
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
index 51feff4..397ff45 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
@@ -20,7 +20,7 @@ public class SimpleWorkflow {
         defaultJobSubmissionTask.setGatewayId("default");
         defaultJobSubmissionTask.setExperimentId("Clone_of_Mothur-Test1_0c9f627e-2c32-403e-a28a-2a8b10c21c1a");
         defaultJobSubmissionTask.setProcessId("PROCESS_438a87cc-2dec-4edc-bfeb-31128df91bb6");
-        defaultJobSubmissionTask.setTaskId(UUID.randomUUID().toString());
+        defaultJobSubmissionTask.setTaskId("TASK_612844a4-aedb-41a5-824f-9b20c76867f7");
 
         List<AbstractTask> tasks = new ArrayList<>();
         tasks.add(defaultJobSubmissionTask);
diff --git a/modules/helix-spectator/src/main/resources/application.properties b/modules/helix-spectator/src/main/resources/application.properties
index 41c5e5f..a9b0969 100644
--- a/modules/helix-spectator/src/main/resources/application.properties
+++ b/modules/helix-spectator/src/main/resources/application.properties
@@ -1,3 +1,3 @@
 zookeeper.connection.url=localhost:2199
 helix.cluster.name=AiravataDemoCluster
-participant.name=all-p1
\ No newline at end of file
+participant.name=all-p2
\ No newline at end of file
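
Each participant/spectator process registering against the same Helix cluster is expected to use its own participant.name, so a second instance would presumably carry a properties file along these lines (illustrative values only):

    zookeeper.connection.url=localhost:2199
    helix.cluster.name=AiravataDemoCluster
    participant.name=all-p3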

-- 
To stop receiving notification emails like this one, please contact
dimuthuupe@apache.org.

[airavata] 12/17: Logging improvements

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 71075e0d6bfcf00047fe271188eff3cf46a8982b
Author: dimuthu <di...@gmail.com>
AuthorDate: Mon Mar 5 23:09:33 2018 -0500

    Logging improvements
---
 .../helix/core/participant/HelixParticipant.java   |  1 +
 .../airavata/helix/impl/task/AiravataTask.java     | 83 +++++++++++++++-------
 .../airavata/helix/impl/task/CompletingTask.java   |  5 +-
 .../airavata/helix/impl/task/EnvSetupTask.java     |  5 +-
 .../helix/impl/task/InputDataStagingTask.java      |  4 +-
 .../helix/impl/task/OutputDataStagingTask.java     |  7 +-
 .../submission/task/DefaultJobSubmissionTask.java  |  5 +-
 .../submission/task/ForkJobSubmissionTask.java     |  5 +-
 .../submission/task/LocalJobSubmissionTask.java    |  5 +-
 .../helix/impl/workflow/PostWorkflowManager.java   |  5 +-
 .../src/main/resources/log4j.properties            |  2 +-
 11 files changed, 84 insertions(+), 43 deletions(-)

diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
index 190b866..503f5ca 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
@@ -142,6 +142,7 @@ public class HelixParticipant <T extends AbstractTask> implements Runnable {
 
             // register task model
             machineEngine.registerStateModelFactory("Task", new TaskStateModelFactory(zkHelixManager, getTaskFactory()));
+
             logger.debug("Participant: " + participantName + ", registered state model factories.");
 
             zkHelixManager.connect();
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
index 03dedf3..289cfc5 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
@@ -4,6 +4,7 @@ import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.helix.core.AbstractTask;
 import org.apache.airavata.helix.core.OutPort;
+import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskOutPort;
 import org.apache.airavata.helix.task.api.annotation.TaskParam;
 import org.apache.airavata.messaging.core.MessageContext;
@@ -22,6 +23,7 @@ import org.apache.helix.HelixManager;
 import org.apache.helix.task.TaskResult;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
+import org.apache.log4j.MDC;
 
 import java.io.PrintWriter;
 import java.io.StringWriter;
@@ -34,7 +36,6 @@ public abstract class AiravataTask extends AbstractTask {
     private ExperimentCatalog experimentCatalog;
     private Publisher statusPublisher;
     private ProcessModel processModel;
-
     private ComputeResourceDescription computeResourceDescription;
 
     private TaskContext taskContext;
@@ -52,7 +53,7 @@ public abstract class AiravataTask extends AbstractTask {
     private OutPort nextTask;
 
     protected TaskResult onSuccess(String message) {
-        String successMessage = "Task " + getTaskId() + " completed." + message != null ? " Message : " + message : "";
+        String successMessage = "Task " + getTaskId() + " completed." + (message != null ? " Message : " + message : "");
         logger.info(successMessage);
         return nextTask.invoke(new TaskResult(TaskResult.Status.COMPLETED, message));
     }
@@ -89,14 +90,14 @@ public abstract class AiravataTask extends AbstractTask {
         return new TaskResult(fatal ? TaskResult.Status.FATAL_FAILED : TaskResult.Status.FAILED, errorMessage);
     }
 
-    public void saveAndPublishProcessStatus(ProcessState state) {
+    protected void saveAndPublishProcessStatus(ProcessState state) {
         ProcessStatus processStatus = new ProcessStatus(state);
         processStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
         getTaskContext().setProcessStatus(processStatus);
         saveAndPublishProcessStatus();
     }
 
-    public void saveAndPublishProcessStatus() {
+    protected void saveAndPublishProcessStatus() {
         try {
             ProcessStatus status = taskContext.getProcessStatus();
             if (status.getTimeOfStateChange() == 0 || status.getTimeOfStateChange() > 0 ){
@@ -116,7 +117,7 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
-    public void saveAndPublishTaskStatus() {
+    protected void saveAndPublishTaskStatus() {
         try {
             TaskState state = getTaskContext().getTaskState();
             // first we save job jobModel to the registry for sa and then save the job status.
@@ -139,7 +140,7 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
-    public void saveExperimentError(ErrorModel errorModel) {
+    protected void saveExperimentError(ErrorModel errorModel) {
         try {
             errorModel.setErrorId(AiravataUtils.getId("EXP_ERROR"));
             getExperimentCatalog().add(ExpCatChildDataType.EXPERIMENT_ERROR, errorModel, experimentId);
@@ -149,7 +150,7 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
-    public void saveProcessError(ErrorModel errorModel) {
+    protected void saveProcessError(ErrorModel errorModel) {
         try {
             errorModel.setErrorId(AiravataUtils.getId("PROCESS_ERROR"));
             experimentCatalog.add(ExpCatChildDataType.PROCESS_ERROR, errorModel, getProcessId());
@@ -160,7 +161,7 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
-    public void saveTaskError(ErrorModel errorModel) throws Exception {
+    protected void saveTaskError(ErrorModel errorModel) throws Exception {
         try {
             errorModel.setErrorId(AiravataUtils.getId("TASK_ERROR"));
             getExperimentCatalog().add(ExpCatChildDataType.TASK_ERROR, errorModel, getTaskId());
@@ -171,7 +172,7 @@ public abstract class AiravataTask extends AbstractTask {
         }
     }
 
-    public Publisher getStatusPublisher() throws AiravataException {
+    protected Publisher getStatusPublisher() throws AiravataException {
         if (statusPublisher == null) {
             synchronized (RabbitMQPublisher.class) {
                 if (statusPublisher == null) {
@@ -183,10 +184,47 @@ public abstract class AiravataTask extends AbstractTask {
     }
 
     @Override
+    public TaskResult onRun(TaskHelper helper) {
+
+        try {
+            MDC.put("experiment", getExperimentId());
+            MDC.put("process", getProcessId());
+            MDC.put("gateway", getGatewayId());
+            MDC.put("task", getTaskId());
+            return onRun(helper, getTaskContext());
+        } finally {
+            MDC.clear();
+        }
+    }
+
+    public abstract TaskResult onRun(TaskHelper helper, TaskContext taskContext);
+
+    @Override
+    public void onCancel() {
+        try {
+            MDC.put("experiment", getExperimentId());
+            MDC.put("process", getProcessId());
+            MDC.put("gateway", getGatewayId());
+            MDC.put("task", getTaskId());
+            onCancel(getTaskContext());
+        } finally {
+            MDC.clear();
+        }
+    }
+
+    public abstract void onCancel(TaskContext taskContext);
+
+
+    @Override
     public void init(HelixManager manager, String workflowName, String jobName, String taskName) {
         super.init(manager, workflowName, jobName, taskName);
+        MDC.put("experiment", getExperimentId());
+        MDC.put("process", getProcessId());
+        MDC.put("gateway", getGatewayId());
+        MDC.put("task", getTaskId());
         try {
             appCatalog = RegistryFactory.getAppCatalog();
+            //logger.info("Gateway id is " + getGatewayId());
             experimentCatalog = RegistryFactory.getExperimentCatalog(getGatewayId());
             processModel = (ProcessModel) experimentCatalog.get(ExperimentCatalogModelType.PROCESS, processId);
 
@@ -208,12 +246,12 @@ public abstract class AiravataTask extends AbstractTask {
                                     .getStoragePreference(gatewayId, processModel.getStorageResourceId()));
 
             this.taskContext = taskContextBuilder.build();
-        } catch (AppCatalogException e) {
-            e.printStackTrace();
-        } catch (RegistryException e) {
-            e.printStackTrace();
+            logger.info("Task " + taskName + " intitialized");
         } catch (Exception e) {
-            e.printStackTrace();
+            logger.error("Error occurred while initializing the task " + getTaskId() + " of experiment " + getExperimentId(), e);
+            throw new RuntimeException("Error occurred while initializing the task " + getTaskId() + " of experiment " + getExperimentId(), e);
+        } finally {
+            MDC.clear();
         }
     }
 
@@ -236,20 +274,15 @@ public abstract class AiravataTask extends AbstractTask {
         msgCtx.setUpdatedTime(AiravataUtils.getCurrentTimestamp());
     }
 
-    //////////////////////////
-
-    public ComputeResourceDescription getComputeResourceDescription() {
+    protected ComputeResourceDescription getComputeResourceDescription() {
         return computeResourceDescription;
     }
 
-    ////////////////////////
-
-
-    public TaskContext getTaskContext() {
+    protected TaskContext getTaskContext() {
         return taskContext;
     }
 
-    public ExperimentCatalog getExperimentCatalog() {
+    protected ExperimentCatalog getExperimentCatalog() {
         return experimentCatalog;
     }
 
@@ -261,7 +294,7 @@ public abstract class AiravataTask extends AbstractTask {
         this.processId = processId;
     }
 
-    public String getExperimentId() {
+    protected String getExperimentId() {
         return experimentId;
     }
 
@@ -269,7 +302,7 @@ public abstract class AiravataTask extends AbstractTask {
         this.experimentId = experimentId;
     }
 
-    public String getGatewayId() {
+    protected String getGatewayId() {
         return gatewayId;
     }
 
@@ -277,7 +310,7 @@ public abstract class AiravataTask extends AbstractTask {
         this.gatewayId = gatewayId;
     }
 
-    public ProcessModel getProcessModel() {
+    protected ProcessModel getProcessModel() {
         return processModel;
     }
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java
index 9ec2909..d036258 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/CompletingTask.java
@@ -13,14 +13,15 @@ public class CompletingTask extends AiravataTask {
     private static final Logger logger = LogManager.getLogger(CompletingTask.class);
 
     @Override
-    public TaskResult onRun(TaskHelper helper) {
+    public TaskResult onRun(TaskHelper helper, TaskContext taskContext) {
+        logger.info("Starting completing task for task " + getTaskId() + ", experiment id " + getExperimentId());
         logger.info("Process " + getProcessId() + " successfully completed");
         saveAndPublishProcessStatus(ProcessState.COMPLETED);
         return onSuccess("Process " + getProcessId() + " successfully completed");
     }
 
     @Override
-    public void onCancel() {
+    public void onCancel(TaskContext taskContext) {
 
     }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
index abdc1bf..0ad5698 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
@@ -16,7 +16,7 @@ public class EnvSetupTask extends AiravataTask {
     private static final Logger logger = LogManager.getLogger(EnvSetupTask.class);
 
     @Override
-    public TaskResult onRun(TaskHelper taskHelper) {
+    public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
         try {
 
             saveAndPublishProcessStatus(ProcessState.CONFIGURING_WORKSPACE);
@@ -32,6 +32,7 @@ public class EnvSetupTask extends AiravataTask {
             adaptor.createDirectory(getTaskContext().getWorkingDir());
             publishTaskState(TaskState.COMPLETED);
             return onSuccess("Envi setup task successfully completed " + getTaskId());
+
         } catch (Exception e) {
             try {
                 publishTaskState(TaskState.FAILED);
@@ -45,7 +46,7 @@ public class EnvSetupTask extends AiravataTask {
     }
 
     @Override
-    public void onCancel() {
+    public void onCancel(TaskContext taskContext) {
 
     }
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
index ed143dd..2c885f4 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/InputDataStagingTask.java
@@ -23,7 +23,7 @@ public class InputDataStagingTask extends DataStagingTask {
     private static final Logger logger = LogManager.getLogger(InputDataStagingTask.class);
 
     @Override
-    public TaskResult onRun(TaskHelper taskHelper) {
+    public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
         logger.info("Starting Input Data Staging Task " + getTaskId());
 
         saveAndPublishProcessStatus(ProcessState.INPUT_DATA_STAGING);
@@ -110,7 +110,7 @@ public class InputDataStagingTask extends DataStagingTask {
     }
 
     @Override
-    public void onCancel() {
+    public void onCancel(TaskContext taskContext) {
 
     }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
index ff8fd2e..738d955 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/OutputDataStagingTask.java
@@ -27,9 +27,9 @@ public class OutputDataStagingTask extends DataStagingTask {
     private static final Logger logger = LogManager.getLogger(OutputDataStagingTask.class);
 
     @Override
-    public TaskResult onRun(TaskHelper taskHelper) {
+    public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
 
-        logger.info("Starting output data staging task " + getTaskId());
+        logger.info("Starting output data staging task " + getTaskId() + " in experiment " + getExperimentId());
         saveAndPublishProcessStatus(ProcessState.OUTPUT_DATA_STAGING);
 
         try {
@@ -51,7 +51,6 @@ public class OutputDataStagingTask extends DataStagingTask {
             }
 
             // Fetch and validate storage resource
-            // Fetch and validate storage resource
             StorageResourceDescription storageResource = getStorageResource();
 
             // Fetch and validate source and destination URLS
@@ -212,7 +211,7 @@ public class OutputDataStagingTask extends DataStagingTask {
     }
 
     @Override
-    public void onCancel() {
+    public void onCancel(TaskContext taskContext) {
 
     }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
index 688f894..9b015bb 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.task.submission.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.TaskContext;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.config.RawCommandInfo;
@@ -29,7 +30,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
     public static final String DEFAULT_JOB_ID = "DEFAULT_JOB_ID";
 
     @Override
-    public TaskResult onRun(TaskHelper taskHelper) {
+    public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
 
         try {
             saveAndPublishProcessStatus(ProcessState.EXECUTING);
@@ -227,7 +228,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
     }
 
     @Override
-    public void onCancel() {
+    public void onCancel(TaskContext taskContext) {
 
     }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
index e3b5447..afce74e 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.task.submission.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.TaskContext;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
@@ -21,7 +22,7 @@ import java.util.Arrays;
 public class ForkJobSubmissionTask extends JobSubmissionTask {
 
     @Override
-    public TaskResult onRun(TaskHelper taskHelper) {
+    public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
 
         try {
             GroovyMapData mapData = new GroovyMapBuilder(getTaskContext()).build();
@@ -76,7 +77,7 @@ public class ForkJobSubmissionTask extends JobSubmissionTask {
     }
 
     @Override
-    public void onCancel() {
+    public void onCancel(TaskContext taskContext) {
 
     }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
index cea6750..3e51b4f 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
@@ -3,6 +3,7 @@ package org.apache.airavata.helix.impl.task.submission.task;
 import org.apache.airavata.agents.api.AgentAdaptor;
 import org.apache.airavata.agents.api.JobSubmissionOutput;
 import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.helix.impl.task.TaskContext;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapBuilder;
 import org.apache.airavata.helix.impl.task.submission.GroovyMapData;
 import org.apache.airavata.helix.impl.task.submission.SubmissionUtil;
@@ -23,7 +24,7 @@ import java.util.UUID;
 public class LocalJobSubmissionTask extends JobSubmissionTask {
 
     @Override
-    public TaskResult onRun(TaskHelper taskHelper) {
+    public TaskResult onRun(TaskHelper taskHelper, TaskContext taskContext) {
 
         try {
             GroovyMapData groovyMapData = new GroovyMapData();
@@ -81,7 +82,7 @@ public class LocalJobSubmissionTask extends JobSubmissionTask {
     }
 
     @Override
-    public void onCancel() {
+    public void onCancel(TaskContext taskContext) {
 
     }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
index 07a9aee..b4ffacf 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
@@ -126,7 +126,7 @@ public class PostWorkflowManager {
                 String status = getStatusByJobId(jobStatusResult.getJobId());
 
                 logger.info("Starting the post workflow for job id : " + jobStatusResult.getJobId() + " with process id "
-                        + processId + ", gateway " + gateway + " and status " + status);
+                        + processId + ", gateway " + gateway + " and status " + jobStatusResult.getState().name());
 
                 // TODO get cluster lock before that
                 if ("cancelled".equals(status)) {
@@ -181,6 +181,9 @@ public class PostWorkflowManager {
                         completingTask.setExperimentId(experimentModel.getExperimentId());
                         completingTask.setProcessId(processModel.getProcessId());
                         completingTask.setTaskId("Completing-Task");
+                        if (allTasks.size() > 0) {
+                            allTasks.get(allTasks.size() - 1).setNextTask(new OutPort(completingTask.getTaskId(), completingTask));
+                        }
                         allTasks.add(completingTask);
 
                         WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster",
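
A quick aside on the linking step added above: each task keeps an out-port to its successor, and this change wires the last real task in the list to the Completing-Task so that process-status finalization always runs at the end of the chain. A toy, self-contained sketch of that idea (the Task class below is a made-up stand-in, not the Airavata AbstractTask/OutPort API):

    import java.util.ArrayList;
    import java.util.List;

    public class TaskChainSketch {

        static class Task {
            final String id;
            Task next;                         // stands in for the out-port to the next task
            Task(String id) { this.id = id; }
            void run() {
                System.out.println("running " + id);
                if (next != null) next.run();  // hand over to the successor, like OutPort.invoke(...)
            }
        }

        public static void main(String[] args) {
            List<Task> allTasks = new ArrayList<>();
            allTasks.add(new Task("OutputDataStaging-1"));
            allTasks.add(new Task("OutputDataStaging-2"));
            // chain consecutive tasks
            for (int i = 0; i < allTasks.size() - 1; i++) {
                allTasks.get(i).next = allTasks.get(i + 1);
            }
            // the new step: point the last task at the completing task before adding it
            Task completing = new Task("Completing-Task");
            if (!allTasks.isEmpty()) {
                allTasks.get(allTasks.size() - 1).next = completing;
            }
            allTasks.add(completing);
            allTasks.get(0).run();
        }
    }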
diff --git a/modules/helix-spectator/src/main/resources/log4j.properties b/modules/helix-spectator/src/main/resources/log4j.properties
index 69a4301..dba82a1 100644
--- a/modules/helix-spectator/src/main/resources/log4j.properties
+++ b/modules/helix-spectator/src/main/resources/log4j.properties
@@ -10,4 +10,4 @@ log4j.appender.A1=org.apache.log4j.ConsoleAppender
 
 # A1 uses PatternLayout.
 log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
\ No newline at end of file
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] [E=%X{experiment},P=%X{process},T=%X{task},G=%X{gateway}] %-5p %c %x - %m%n
\ No newline at end of file
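
For readers following the logging changes: the new ConversionPattern above pulls the experiment/process/task/gateway identifiers that AiravataTask now places in the log4j MDC around onRun()/onCancel()/init(). A minimal standalone sketch of that interaction (the IDs are invented placeholders; it assumes log4j 1.2 on the classpath, as in this module):

    import org.apache.log4j.ConsoleAppender;
    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;
    import org.apache.log4j.MDC;
    import org.apache.log4j.PatternLayout;

    public class MdcLoggingSketch {
        private static final Logger logger = Logger.getLogger(MdcLoggingSketch.class);

        public static void main(String[] args) {
            // Same pattern as the updated log4j.properties, configured programmatically.
            PatternLayout layout = new PatternLayout(
                    "%-4r [%t] [E=%X{experiment},P=%X{process},T=%X{task},G=%X{gateway}] %-5p %c %x - %m%n");
            Logger.getRootLogger().addAppender(new ConsoleAppender(layout));
            Logger.getRootLogger().setLevel(Level.INFO);

            // AiravataTask wraps the real work with MDC context like this.
            MDC.put("experiment", "EXP_example");
            MDC.put("process", "PROCESS_example");
            MDC.put("gateway", "default");
            MDC.put("task", "TASK_example");
            try {
                // Prints something like:
                // 1    [main] [E=EXP_example,P=PROCESS_example,T=TASK_example,G=default] INFO  MdcLoggingSketch  - Task TASK_example initialized
                logger.info("Task TASK_example initialized");
            } finally {
                MDC.clear();
            }
        }
    }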


[airavata] 07/17: Standalone email monitor initial implementation

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 4e1c1b0f1156f6453b81b7a58a30a196545e2489
Author: dimuthu <di...@gmail.com>
AuthorDate: Fri Mar 2 16:58:42 2018 -0500

    Standalone email monitor initial implementation
---
 modules/job-monitor/pom.xml                        |  38 +++
 .../airavata/job/monitor/EmailBasedMonitor.java    | 309 +++++++++++++++++++
 .../monitor/parser/AiravataCustomMailParser.java   |  77 +++++
 .../airavata/job/monitor/parser/EmailParser.java   |  34 +++
 .../job/monitor/parser/JobStatusResult.java        |  63 ++++
 .../job/monitor/parser/LSFEmailParser.java         |  78 +++++
 .../job/monitor/parser/PBSEmailParser.java         | 105 +++++++
 .../job/monitor/parser/ResourceConfig.java         |  54 ++++
 .../job/monitor/parser/SLURMEmailParser.java       |  83 +++++
 .../job/monitor/parser/UGEEmailParser.java         | 109 +++++++
 .../src/main/resources/airavata-server.properties  | 334 +++++++++++++++++++++
 .../src/main/resources/email-config.yaml           |  20 ++
 .../src/main/resources/log4j.properties            |   9 +
 pom.xml                                            |   3 +
 14 files changed, 1316 insertions(+)

diff --git a/modules/job-monitor/pom.xml b/modules/job-monitor/pom.xml
new file mode 100644
index 0000000..c536a14
--- /dev/null
+++ b/modules/job-monitor/pom.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>job-monitor</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>1.7.25</version>
+        </dependency>
+        <dependency>
+            <groupId>javax.mail</groupId>
+            <artifactId>mail</artifactId>
+            <version>1.4.5</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-commons</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+            <version>1.15</version>
+        </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/EmailBasedMonitor.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/EmailBasedMonitor.java
new file mode 100644
index 0000000..7b13354
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/EmailBasedMonitor.java
@@ -0,0 +1,309 @@
+package org.apache.airavata.job.monitor;
+
+import org.apache.airavata.common.exception.AiravataException;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.job.monitor.parser.EmailParser;
+import org.apache.airavata.job.monitor.parser.JobStatusResult;
+import org.apache.airavata.job.monitor.parser.ResourceConfig;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import javax.mail.Address;
+import javax.mail.Flags;
+import javax.mail.Folder;
+import javax.mail.Message;
+import javax.mail.MessagingException;
+import javax.mail.Session;
+import javax.mail.Store;
+import javax.mail.search.FlagTerm;
+import javax.mail.search.SearchTerm;
+import java.io.FileReader;
+import java.io.InputStream;
+import java.io.Reader;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class EmailBasedMonitor implements Runnable {
+
+    private static final Logger log = LoggerFactory.getLogger(EmailBasedMonitor.class);
+
+    public static final int COMPARISON = 6; // after and equal
+    public static final String IMAPS = "imaps";
+    public static final String POP3 = "pop3";
+    private boolean stopMonitoring = false;
+
+    private Session session ;
+    private Store store;
+    private Folder emailFolder;
+    private Properties properties;
+    //private Map<String, TaskContext> jobMonitorMap = new ConcurrentHashMap<>();
+    private String host, emailAddress, password, storeProtocol, folderName ;
+    private Date monitorStartDate;
+    private Map<ResourceJobManagerType, EmailParser> emailParserMap = new HashMap<ResourceJobManagerType, EmailParser>();
+    private Map<String, ResourceJobManagerType> addressMap = new HashMap<>();
+    private Message[] flushUnseenMessages;
+    private Map<String, Boolean> canceledJobs = new ConcurrentHashMap<>();
+    private Timer timer;
+    private Map<ResourceJobManagerType, ResourceConfig> resourceConfigs = new HashMap<>();
+
+
+    public EmailBasedMonitor() throws Exception {
+        init();
+        populateAddressAndParserMap(resourceConfigs);
+    }
+
+    private void init() throws Exception {
+        loadContext();
+        host = ServerSettings.getEmailBasedMonitorHost();
+        emailAddress = ServerSettings.getEmailBasedMonitorAddress();
+        password = ServerSettings.getEmailBasedMonitorPassword();
+        storeProtocol = ServerSettings.getEmailBasedMonitorStoreProtocol();
+        folderName = ServerSettings.getEmailBasedMonitorFolderName();
+        if (!(storeProtocol.equals(IMAPS) || storeProtocol.equals(POP3))) {
+            throw new AiravataException("Unsupported store protocol , expected " +
+                    IMAPS + " or " + POP3 + " but found " + storeProtocol);
+        }
+        properties = new Properties();
+        properties.put("mail.store.protocol", storeProtocol);
+        timer = new Timer("CancelJobHandler", true);
+        long period = 1000 * 60 * 5; // five minute delay between successive task executions.
+    }
+
+    private void loadContext() throws Exception {
+        Yaml yaml = new Yaml();
+        InputStream emailConfigStream = EmailBasedMonitor.class.getClassLoader().getResourceAsStream("email-config.yaml");
+        Object load = yaml.load(emailConfigStream);
+
+        if (load == null) {
+            throw new Exception("Could not load the configuration");
+        }
+
+        if (load instanceof Map) {
+            Map<String, Object> loadMap = (Map<String, Object>) load;
+            Map<String, Object> configMap = (Map<String, Object>) loadMap.get("config");
+            List<Map<String,Object >> resourceObjs = (List<Map<String, Object>>) configMap.get("resources");
+            if (resourceObjs != null) {
+                resourceObjs.forEach(resource -> {
+                    ResourceConfig resourceConfig = new ResourceConfig();
+                    String identifier = resource.get("jobManagerType").toString();
+                    resourceConfig.setJobManagerType(ResourceJobManagerType.valueOf(identifier));
+                    Object emailParser = resource.get("emailParser");
+                    if (emailParser != null){
+                        resourceConfig.setEmailParser(emailParser.toString());
+                    }
+                    List<String> emailAddressList = (List<String>) resource.get("resourceEmailAddresses");
+                    resourceConfig.setResourceEmailAddresses(emailAddressList);
+                    resourceConfigs.put(resourceConfig.getJobManagerType(), resourceConfig);
+                });
+            }
+        }
+        populateAddressAndParserMap(resourceConfigs);
+    }
+
+    private void populateAddressAndParserMap(Map<ResourceJobManagerType, ResourceConfig> resourceConfigs) throws AiravataException {
+        for (Map.Entry<ResourceJobManagerType, ResourceConfig> resourceConfigEntry : resourceConfigs.entrySet()) {
+            ResourceJobManagerType type = resourceConfigEntry.getKey();
+            ResourceConfig config = resourceConfigEntry.getValue();
+            List<String> resourceEmailAddresses = config.getResourceEmailAddresses();
+            if (resourceEmailAddresses != null && !resourceEmailAddresses.isEmpty()){
+                for (String resourceEmailAddress : resourceEmailAddresses) {
+                    addressMap.put(resourceEmailAddress, type);
+                }
+                try {
+                    Class<? extends EmailParser> emailParserClass = Class.forName(config.getEmailParser()).asSubclass(EmailParser.class);
+                    EmailParser emailParser = emailParserClass.getConstructor().newInstance();
+                    emailParserMap.put(type, emailParser);
+                } catch (Exception e) {
+                    throw new AiravataException("Error while instantiation email parsers", e);
+                }
+            }
+        }
+
+    }
+
+    public void monitor(String jobId) {
+        log.info("[EJM]: Added monitor Id : {} to email based monitor map", jobId);
+    }
+
+    public void stopMonitor(String jobId, boolean runOutflow) {
+
+    }
+
+    public boolean isMonitoring(String jobId) {
+        return true;
+    }
+
+    public void canceledJob(String jobId) {
+
+    }
+
+    private JobStatusResult parse(Message message) throws MessagingException, AiravataException {
+        Address fromAddress = message.getFrom()[0];
+        String addressStr = fromAddress.toString();
+        ResourceJobManagerType jobMonitorType = getJobMonitorType(addressStr);
+        EmailParser emailParser = emailParserMap.get(jobMonitorType);
+        if (emailParser == null) {
+            throw new AiravataException("[EJM]: Un-handle resource job manager type: " + jobMonitorType
+                    .toString() + " for email monitoring -->  " + addressStr);
+        }
+        return emailParser.parseEmail(message);
+    }
+
+    private ResourceJobManagerType getJobMonitorType(String addressStr) throws AiravataException {
+//        System.out.println("*********** address ******** : " + addressStr);
+        for (Map.Entry<String, ResourceJobManagerType> addressEntry : addressMap.entrySet()) {
+            if (addressStr.contains(addressEntry.getKey())) {
+                return addressEntry.getValue();
+            }
+        }
+        throw new AiravataException("[EJM]: Couldn't identify Resource job manager type from address " + addressStr);
+    }
+
+    @Override
+    public void run() {
+        boolean quite = false;
+
+        while (!stopMonitoring && !ServerSettings.isStopAllThreads()) {
+            try {
+                session = Session.getDefaultInstance(properties);
+                store = session.getStore(storeProtocol);
+                store.connect(host, emailAddress, password);
+                emailFolder = store.getFolder(folderName);
+                // first time we search for all unread messages.
+                SearchTerm unseenBefore = new FlagTerm(new Flags(Flags.Flag.SEEN), false);
+                while (!(stopMonitoring || ServerSettings.isStopAllThreads())) {
+                    Thread.sleep(ServerSettings.getEmailMonitorPeriod());// sleep a bit - get a rest till job finishes
+                    if (!store.isConnected()) {
+                        store.connect();
+                        emailFolder = store.getFolder(folderName);
+                    }
+                    log.info("[EJM]: Retrieving unseen emails");
+                    emailFolder.open(Folder.READ_WRITE);
+                    if (emailFolder.isOpen()) {
+                        // flush if any message left in flushUnseenMessage
+                        if (flushUnseenMessages != null && flushUnseenMessages.length > 0) {
+                            try {
+                                emailFolder.setFlags(flushUnseenMessages, new Flags(Flags.Flag.SEEN), false);
+                                flushUnseenMessages = null;
+                            } catch (MessagingException e) {
+                                if (!store.isConnected()) {
+                                    store.connect();
+                                    emailFolder.setFlags(flushUnseenMessages, new Flags(Flags.Flag.SEEN), false);
+                                    flushUnseenMessages = null;
+                                }
+                            }
+                        }
+                        Message[] searchMessages = emailFolder.search(unseenBefore);
+                        if (searchMessages == null || searchMessages.length == 0) {
+                            log.info("[EJM]: No new email messages");
+                        } else {
+                            log.info("[EJM]: " + searchMessages.length + " new email/s received");
+                        }
+                        processMessages(searchMessages);
+                        emailFolder.close(false);
+                    }
+                }
+            } catch (MessagingException e) {
+                log.error("[EJM]: Couldn't connect to the store ", e);
+            } catch (InterruptedException e) {
+                log.error("[EJM]: Interrupt exception while sleep ", e);
+            } catch (AiravataException e) {
+                log.error("[EJM]: UnHandled arguments ", e);
+            } catch (Throwable e)  {
+                log.error("[EJM]: Caught a throwable ", e);
+            } finally {
+                try {
+                    emailFolder.close(false);
+                    store.close();
+                } catch (MessagingException e) {
+                    log.error("[EJM]: Store close operation failed, couldn't close store", e);
+                } catch (Throwable e) {
+                    log.error("[EJM]: Caught a throwable while closing email store ", e);
+                }
+            }
+        }
+        log.info("[EJM]: Email monitoring daemon stopped");
+    }
+
+    private void processMessages(Message[] searchMessages) throws MessagingException {
+        List<Message> processedMessages = new ArrayList<>();
+        List<Message> unreadMessages = new ArrayList<>();
+        for (Message message : searchMessages) {
+            try {
+                JobStatusResult jobStatusResult = parse(message);
+                log.info(jobStatusResult.getJobId() + ", " + jobStatusResult.getJobName() + ", " + jobStatusResult.getState().getValue());
+                //processedMessages.add(message);
+                unreadMessages.add(message);
+            } catch (Exception e) {
+                unreadMessages.add(message);
+            }
+        }
+        if (!processedMessages.isEmpty()) {
+            Message[] seenMessages = new Message[processedMessages.size()];
+            processedMessages.toArray(seenMessages);
+            try {
+                emailFolder.setFlags(seenMessages, new Flags(Flags.Flag.SEEN), true);
+            } catch (MessagingException e) {
+                if (!store.isConnected()) {
+                    store.connect();
+                    emailFolder.setFlags(seenMessages, new Flags(Flags.Flag.SEEN), true);
+                }
+            }
+
+        }
+        if (!unreadMessages.isEmpty()) {
+            Message[] unseenMessages = new Message[unreadMessages.size()];
+            unreadMessages.toArray(unseenMessages);
+            try {
+                emailFolder.setFlags(unseenMessages, new Flags(Flags.Flag.SEEN), false);
+            } catch (MessagingException e) {
+                if (!store.isConnected()) {
+                    store.connect();
+                    emailFolder.setFlags(unseenMessages, new Flags(Flags.Flag.SEEN), false);
+                    flushUnseenMessages = unseenMessages; // anyway we need to push this update.
+                } else {
+                    flushUnseenMessages = unseenMessages; // anyway we need to push this update.
+                }
+            }
+        }
+    }
+
+    private void process(JobStatusResult jobStatusResult){
+
+    }
+
+    private void writeEnvelopeOnError(Message m) throws MessagingException {
+        Address[] a;
+        // FROM
+        if ((a = m.getFrom()) != null) {
+            for (int j = 0; j < a.length; j++)
+                log.error("FROM: " + a[j].toString());
+        }
+        // TO
+        if ((a = m.getRecipients(Message.RecipientType.TO)) != null) {
+            for (int j = 0; j < a.length; j++)
+                log.error("TO: " + a[j].toString());
+        }
+        // SUBJECT
+        if (m.getSubject() != null)
+            log.error("SUBJECT: " + m.getSubject());
+    }
+
+    public void stopMonitoring() {
+        stopMonitoring = true;
+    }
+
+    public void setDate(Date date) {
+        this.monitorStartDate = date;
+    }
+
+    public static void main(String args[]) throws Exception {
+        EmailBasedMonitor monitor = new EmailBasedMonitor();
+        Thread t = new Thread(monitor);
+        t.start();
+        t.join();
+    }
+}
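
loadContext() above expects email-config.yaml to carry a top-level config key holding a resources list, each entry with jobManagerType, emailParser and resourceEmailAddresses. A hedged sketch that builds such a document in-line and walks it the same way (the resource type, parser class and address below are illustrative choices, not values taken from the real config file):

    import org.yaml.snakeyaml.Yaml;

    import java.util.List;
    import java.util.Map;

    public class EmailConfigSketch {
        @SuppressWarnings("unchecked")
        public static void main(String[] args) {
            String yaml =
                    "config:\n" +
                    "  resources:\n" +
                    "    - jobManagerType: SLURM\n" +
                    "      emailParser: org.apache.airavata.job.monitor.parser.SLURMEmailParser\n" +
                    "      resourceEmailAddresses:\n" +
                    "        - slurm@cluster.example.edu\n";

            Map<String, Object> load = (Map<String, Object>) new Yaml().load(yaml);
            Map<String, Object> config = (Map<String, Object>) load.get("config");
            List<Map<String, Object>> resources = (List<Map<String, Object>>) config.get("resources");
            for (Map<String, Object> resource : resources) {
                // prints: SLURM -> org.apache.airavata.job.monitor.parser.SLURMEmailParser for [slurm@cluster.example.edu]
                System.out.println(resource.get("jobManagerType") + " -> " + resource.get("emailParser")
                        + " for " + resource.get("resourceEmailAddresses"));
            }
        }
    }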
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/AiravataCustomMailParser.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/AiravataCustomMailParser.java
new file mode 100644
index 0000000..871c956
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/AiravataCustomMailParser.java
@@ -0,0 +1,77 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.job.monitor.parser;
+
+import org.apache.airavata.common.exception.AiravataException;
+import org.apache.airavata.model.status.JobState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.mail.Message;
+import javax.mail.MessagingException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class AiravataCustomMailParser implements EmailParser {
+
+    private static final Logger log = LoggerFactory.getLogger(AiravataCustomMailParser.class);
+
+    private static final String REGEX = "[a-zA-Z]*_[a-z]*=(?<" + JOBID + ">\\d*)[ ]*[a-zA-Z]*=(?<" +
+            JOBNAME + ">[a-zA-Z0-9-]*)[ ]*[a-zA-Z]*=(?<" + STATUS + ">[a-zA-Z]*).*";
+
+    public static final String COMPLETED = "COMPLETED";
+    private static final Pattern pattern = Pattern.compile(REGEX);
+
+    @Override
+    public JobStatusResult parseEmail(Message message) throws MessagingException, AiravataException {
+        JobStatusResult jobStatusResult = new JobStatusResult();
+        parseSubject(message.getSubject(), jobStatusResult);
+        return jobStatusResult;
+    }
+
+    private void parseSubject(String subject, JobStatusResult jobStatusResult) throws MessagingException {
+        Matcher matcher = pattern.matcher(subject);
+        if (matcher.find()) {
+            jobStatusResult.setJobId(matcher.group(JOBID));
+            jobStatusResult.setJobName(matcher.group(JOBNAME));
+            jobStatusResult.setState(getJobState(matcher.group(STATUS)));
+            jobStatusResult.setAuthoritative(false);
+
+            try {
+                //Waiting some time for the scheduler to move the job from completing to completed.
+                Thread.sleep(5000);
+            } catch (Exception ex) {
+            }
+
+        } else {
+            log.error("[EJM]: No matched found for subject -> " + subject);
+        }
+    }
+
+    private JobState getJobState(String state) {
+        switch (state.trim()) {
+            case COMPLETED:
+                return JobState.COMPLETE;
+            default:
+                log.error("[EJM]: Job State " + state + " isn't handle by Airavata custom parser");
+                return JobState.UNKNOWN;
+        }
+    }
+}
\ No newline at end of file
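
The subject parsing above leans on Java named capture groups (jobId, jobName and status come from the EmailParser interface constants). A self-contained illustration using the same expression against an invented subject line in the key=value shape the regex accepts:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class CustomParserRegexSketch {
        public static void main(String[] args) {
            // The expression AiravataCustomMailParser assembles from JOBID/JOBNAME/STATUS.
            Pattern pattern = Pattern.compile(
                    "[a-zA-Z]*_[a-z]*=(?<jobId>\\d*)[ ]*[a-zA-Z]*=(?<jobName>[a-zA-Z0-9-]*)[ ]*[a-zA-Z]*=(?<status>[a-zA-Z]*).*");
            // Made-up subject line; the real notification format is not shown in this commit message.
            Matcher matcher = pattern.matcher("Job_id=1234 Name=TestJob-1 Status=COMPLETED");
            if (matcher.find()) {
                // prints: jobId=1234 jobName=TestJob-1 status=COMPLETED
                System.out.println("jobId=" + matcher.group("jobId")
                        + " jobName=" + matcher.group("jobName")
                        + " status=" + matcher.group("status"));
            }
        }
    }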
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/EmailParser.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/EmailParser.java
new file mode 100644
index 0000000..2bb3cc0
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/EmailParser.java
@@ -0,0 +1,34 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.job.monitor.parser;
+
+import org.apache.airavata.common.exception.AiravataException;
+
+import javax.mail.Message;
+import javax.mail.MessagingException;
+
+public interface EmailParser {
+    static final String STATUS = "status";
+    static final String JOBID = "jobId";
+    static final String JOBNAME = "jobName";
+    static final String EXIT_STATUS = "exitStatus";
+
+    JobStatusResult parseEmail(Message message) throws MessagingException, AiravataException;
+}
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/JobStatusResult.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/JobStatusResult.java
new file mode 100644
index 0000000..6019ae6
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/JobStatusResult.java
@@ -0,0 +1,63 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.job.monitor.parser;
+
+
+import org.apache.airavata.model.status.JobState;
+
+public class JobStatusResult {
+    private JobState state;
+    private String jobId;
+    private String jobName;
+    private boolean authoritative = true;
+
+    public String getJobName() {
+        return jobName;
+    }
+
+    public void setJobName(String jobName) {
+        this.jobName = jobName;
+    }
+
+    public JobState getState() {
+        return state;
+    }
+
+    public void setState(JobState state) {
+        this.state = state;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(String jobId) {
+        this.jobId = jobId;
+    }
+
+    public boolean isAuthoritative() {
+        return authoritative;
+    }
+
+    public void setAuthoritative(boolean authoritative) {
+        this.authoritative = authoritative;
+    }
+}
+
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/LSFEmailParser.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/LSFEmailParser.java
new file mode 100644
index 0000000..70d6f9d
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/LSFEmailParser.java
@@ -0,0 +1,78 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.job.monitor.parser;
+
+import org.apache.airavata.common.exception.AiravataException;
+import org.apache.airavata.model.status.JobState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.mail.Message;
+import javax.mail.MessagingException;
+import java.io.IOException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class LSFEmailParser implements EmailParser {
+    private static final Logger log = LoggerFactory.getLogger(LSFEmailParser.class);
+    private static final String REGEX = "[a-zA-Z]+\\s+(?<" + JOBID + ">[\\d]+):\\s+<(?<" + JOBNAME + ">[a-zA-Z0-9]+)>\\s+(?<" + STATUS + ">[a-zA-Z]+)";
+    public static final String STARTED = "started";
+    public static final String COMPLETE = "Done";
+    public static final String FAILED = "Exited";
+
+    @Override
+    public JobStatusResult parseEmail(Message message) throws MessagingException, AiravataException {
+        JobStatusResult jobStatusResult = new JobStatusResult();
+
+        parseContent(message, jobStatusResult);
+        return jobStatusResult;
+    }
+
+    private void parseContent(Message message, JobStatusResult jobStatusResult) throws MessagingException, AiravataException {
+        String subject = message.getSubject();
+        Pattern pattern = Pattern.compile(REGEX);
+        Matcher matcher = pattern.matcher(subject);
+        try {
+            if (matcher.find()) {
+                jobStatusResult.setJobId(matcher.group(JOBID));
+                jobStatusResult.setJobName(matcher.group(JOBNAME));
+                String content = (String) message.getContent();
+                jobStatusResult.setState(getJobState(matcher.group(STATUS), content));
+            } else {
+                log.error("[EJM]: No matched found for subject => \n" + subject);
+            }
+        } catch (IOException e) {
+            throw new AiravataException("[EJM]: Error while reading content of the email message");
+        }
+    }
+
+    private JobState getJobState(String status, String content) {
+        switch (status) {
+            case STARTED:
+                return JobState.ACTIVE;
+            case COMPLETE:
+                return JobState.COMPLETE;
+            case FAILED:
+                return JobState.FAILED;
+            default:
+                return JobState.UNKNOWN;
+        }
+    }
+}
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/PBSEmailParser.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/PBSEmailParser.java
new file mode 100644
index 0000000..12fd5cb
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/PBSEmailParser.java
@@ -0,0 +1,105 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.job.monitor.parser;
+
+import org.apache.airavata.common.exception.AiravataException;
+import org.apache.airavata.model.status.JobState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.mail.Message;
+import javax.mail.MessagingException;
+import java.io.IOException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class PBSEmailParser implements EmailParser {
+    private static final Logger log = LoggerFactory.getLogger(PBSEmailParser.class);
+    public static final String BEGUN_EXECUTION = "Begun execution";
+    public static final String EXECUTION_TERMINATED = "Execution terminated";
+    public static final String ABORTED_BY_PBS_SERVER = "Aborted by PBS Server";
+
+    static final String REGEX = "[a-zA-Z ]*:[ ]*(?<" + JOBID + ">[a-zA-Z0-9-_\\.]*)\\s+[a-zA-Z ]*:[ ]*(?<" +
+            JOBNAME + ">[a-zA-Z0-9-\\.]*)\\s[\\S|\\s]*(?<" + STATUS + ">" + BEGUN_EXECUTION + "|" +
+            EXECUTION_TERMINATED + "|" + ABORTED_BY_PBS_SERVER + ")";
+
+    private static final String REGEX_EXIT_STATUS = "Exit_status=(?<" + EXIT_STATUS + ">[\\d]+)";
+
+    @Override
+    public JobStatusResult parseEmail(Message message) throws MessagingException, AiravataException {
+        JobStatusResult jobStatusResult = new JobStatusResult();
+//        log.info("Parsing -> " + message.getSubject());
+        try {
+            String content = ((String) message.getContent());
+            parseContent(content, jobStatusResult);
+        } catch (IOException e) {
+            throw new AiravataException("[EJM]: Error while reading content of the email message");
+        }
+        return jobStatusResult;
+    }
+
+    void parseContent(String content, JobStatusResult jobStatusResult) throws MessagingException, AiravataException {
+        content = content.replaceAll("[^\\x00-\\x7F]", "");
+        Pattern pattern = Pattern.compile(REGEX);
+        Matcher matcher = pattern.matcher(content);
+        if (matcher.find()) {
+            jobStatusResult.setJobId(matcher.group(JOBID));
+            jobStatusResult.setJobName(matcher.group(JOBNAME));
+            String statusLine = matcher.group(STATUS);
+            jobStatusResult.setState(getJobState(statusLine, content));
+        } else {
+            log.error("[EJM]: No matched found for content => \n" + content);
+        }
+    }
+
+    private JobState getJobState(String statusLine, String content) {
+        switch (statusLine) {
+            case BEGUN_EXECUTION:
+                return JobState.ACTIVE;
+            case EXECUTION_TERMINATED:
+                int exitStatus = getExitStatus(content);
+                if (exitStatus == 0) {
+                    // TODO - Remove rabbitmq client script line from the script.
+                    return JobState.COMPLETE;
+                } else if (exitStatus == 271) {
+                    return JobState.CANCELED;
+                } else {
+                    return JobState.FAILED;
+                }
+            case ABORTED_BY_PBS_SERVER:
+                return JobState.FAILED;
+            default:
+                return JobState.UNKNOWN;
+        }
+    }
+
+    private int getExitStatus(String content) {
+        Pattern pattern = Pattern.compile(REGEX_EXIT_STATUS);
+        Matcher matcher = pattern.matcher(content);
+        if (matcher.find()) {
+            String group = matcher.group(EXIT_STATUS);
+            if (group != null && !group.trim().isEmpty()) {
+                return Integer.valueOf(group.trim());
+            }
+        }
+        return -1;
+    }
+
+}
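
The exit-code handling above is what separates a completed, cancelled or failed PBS job (271 is the value PBS/Torque typically reports for a job killed, e.g. via qdel, which the parser maps to CANCELED). A short standalone sketch of that extraction and mapping, with an invented message body:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class PbsExitStatusSketch {
        public static void main(String[] args) {
            // The expression PBSEmailParser uses to pull the exit code out of the body.
            Pattern exitStatus = Pattern.compile("Exit_status=(?<exitStatus>[\\d]+)");
            // Invented body fragment; real PBS notifications carry more fields than this.
            String body = "PBS Job Id: 1234.scheduler\nJob Name: test-job\nExecution terminated\nExit_status=271";
            Matcher matcher = exitStatus.matcher(body);
            if (matcher.find()) {
                int code = Integer.parseInt(matcher.group("exitStatus"));
                // Mapping used in PBSEmailParser.getJobState(): 0 -> COMPLETE, 271 -> CANCELED, otherwise FAILED.
                String state = (code == 0) ? "COMPLETE" : (code == 271) ? "CANCELED" : "FAILED";
                System.out.println("Exit_status=" + code + " -> " + state);   // prints: Exit_status=271 -> CANCELED
            }
        }
    }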
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/ResourceConfig.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/ResourceConfig.java
new file mode 100644
index 0000000..aacbc2d
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/ResourceConfig.java
@@ -0,0 +1,54 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.job.monitor.parser;
+
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
+
+import java.util.List;
+
+public class ResourceConfig {
+	private ResourceJobManagerType jobManagerType;
+	private String emailParser;
+	private List<String> resourceEmailAddresses;
+
+	public ResourceJobManagerType getJobManagerType() {
+		return jobManagerType;
+	}
+
+	public void setJobManagerType(ResourceJobManagerType jobManagerType) {
+		this.jobManagerType = jobManagerType;
+	}
+
+	public String getEmailParser() {
+		return emailParser;
+	}
+
+	public void setEmailParser(String emailParser) {
+		this.emailParser = emailParser;
+	}
+
+	public List<String> getResourceEmailAddresses() {
+		return resourceEmailAddresses;
+	}
+
+	public void setResourceEmailAddresses(List<String> resourceEmailAddresses) {
+		this.resourceEmailAddresses = resourceEmailAddresses;
+	}
+}
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/SLURMEmailParser.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/SLURMEmailParser.java
new file mode 100644
index 0000000..d06c8b1
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/SLURMEmailParser.java
@@ -0,0 +1,83 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.job.monitor.parser;
+
+import org.apache.airavata.common.exception.AiravataException;
+import org.apache.airavata.model.status.JobState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.mail.Message;
+import javax.mail.MessagingException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class SLURMEmailParser implements EmailParser {
+
+    private static final Logger log = LoggerFactory.getLogger(SLURMEmailParser.class);
+
+    private static final String REGEX = "[A-Z]*\\s[a-zA-Z]*_[a-z]*=(?<" + JOBID + ">\\d*)[ ]*[a-zA-Z]*=(?<"+
+            JOBNAME + ">[a-zA-Z0-9-]*)[ ]*(?<" + STATUS + ">[]a-zA-Z ]*),.*";
+
+    public static final String BEGAN = "Began";
+    public static final String STAGE_OUT = "Staged Out";
+    public static final String ENDED = "Ended";
+    public static final String FAILED = "Failed";
+    private static final Pattern cancelledStatePattern = Pattern.compile("CANCELLED");
+    private static final Pattern pattern = Pattern.compile(REGEX);
+
+    @Override
+    public JobStatusResult parseEmail(Message message) throws MessagingException, AiravataException{
+        JobStatusResult jobStatusResult = new JobStatusResult();
+        parseSubject(message.getSubject(), jobStatusResult);
+        return jobStatusResult;
+    }
+
+    private void parseSubject(String subject, JobStatusResult jobStatusResult) throws MessagingException {
+        Matcher matcher = pattern.matcher(subject);
+        if (matcher.find()) {
+            jobStatusResult.setJobId(matcher.group(JOBID));
+            jobStatusResult.setJobName(matcher.group(JOBNAME));
+            jobStatusResult.setState(getJobState(matcher.group(STATUS), subject));
+        } else {
+            log.error("[EJM]: No matched found for subject -> " + subject);
+        }
+    }
+
+    private JobState getJobState(String state, String subject) {
+        switch (state.trim()) {
+            case BEGAN: case STAGE_OUT:
+                return JobState.ACTIVE;
+            case ENDED:
+                Matcher matcher = cancelledStatePattern.matcher(subject);
+                if (matcher.find()) {
+                   return JobState.CANCELED;
+                }
+                return JobState.COMPLETE;
+            case FAILED:
+                return JobState.FAILED;
+            default:
+                log.error("[EJM]: Job State " + state + " isn't handle by SLURM parser");
+                return JobState.UNKNOWN;
+
+        }
+    }
+
+}
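
To make the subject-line contract concrete, here is a minimal sketch of the same named-group matching on a representative SLURM notification subject. The subject string and the simplified pattern below are illustrative assumptions; the parser uses its own REGEX built from the EmailParser group-name constants, and an "Ended" status only maps to COMPLETE when the subject does not also contain CANCELLED.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class SlurmSubjectDemo {

        // Simplified stand-in for the parser's REGEX, with hard-coded group names.
        private static final Pattern SUBJECT = Pattern.compile(
                "[A-Z]*\\s[a-zA-Z]*_[a-z]*=(?<jobid>\\d*) *[a-zA-Z]*=(?<jobname>[a-zA-Z0-9-]*) *(?<status>[A-Za-z ]*),.*");

        public static void main(String[] args) {
            // Hypothetical subject line in the shape the regex expects.
            String subject = "SLURM Job_id=2509388 Name=test-job Ended, Run time 00:02:10, COMPLETED, ExitCode 0";
            Matcher m = SUBJECT.matcher(subject);
            if (m.find()) {
                System.out.println(m.group("jobid"));   // 2509388
                System.out.println(m.group("jobname")); // test-job
                System.out.println(m.group("status"));  // Ended
            }
        }
    }
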
diff --git a/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/UGEEmailParser.java b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/UGEEmailParser.java
new file mode 100644
index 0000000..da2866f
--- /dev/null
+++ b/modules/job-monitor/src/main/java/org/apache/airavata/job/monitor/parser/UGEEmailParser.java
@@ -0,0 +1,109 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.airavata.job.monitor.parser;
+
+import org.apache.airavata.common.exception.AiravataException;
+import org.apache.airavata.model.status.JobState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.mail.Message;
+import javax.mail.MessagingException;
+import java.io.IOException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class UGEEmailParser implements EmailParser {
+
+    private static final Logger log = LoggerFactory.getLogger(UGEEmailParser.class);
+    private static final String REGEX = "[\\w]*[ ]*(?<"+ JOBID + ">[\\d]*)[ ]*\\((?<" + JOBNAME
+            + ">[a-zA-Z0-9]*)\\)[ ]*(?<" + STATUS + ">[a-zA-Z]*)";
+    public static final String STARTED = "Started";
+    public static final String COMPLETE = "Complete";
+    public static final String FAILED = "Failed";
+    public static final String KILLED = "Killed";
+    private static final String REGEX_EXIT_STATUS = "Exit Status[ ]*=[ ]*(?<" + EXIT_STATUS + ">[\\d]+)";
+    public static final String ABORTED = "Aborted";
+
+
+    @Override
+    public JobStatusResult parseEmail(Message message) throws MessagingException, AiravataException {
+        JobStatusResult jobStatusResult = new JobStatusResult();
+
+        parseContent(message, jobStatusResult);
+        return jobStatusResult;
+    }
+
+    private void parseContent(Message message, JobStatusResult jobStatusResult) throws MessagingException, AiravataException {
+        String subject = message.getSubject();
+
+        //FIXME - HACK to handle Little Dog email issue from SIU
+        subject = subject.replace("Set in error state", "Failed");
+
+        Pattern pattern = Pattern.compile(REGEX);
+        Matcher matcher = pattern.matcher(subject);
+        try {
+            if (matcher.find()) {
+                jobStatusResult.setJobId(matcher.group(JOBID));
+                jobStatusResult.setJobName(matcher.group(JOBNAME));
+                String content = (String) message.getContent();
+                jobStatusResult.setState(getJobState(matcher.group(STATUS), content));
+            } else {
+                log.error("[EJM]: No matched found for subject => \n" + subject);
+            }
+        } catch (IOException e) {
+            throw new AiravataException("[EJM]: Error while reading content of the email message");
+        }
+    }
+
+    private JobState getJobState(String status, String content) {
+        switch (status) {
+            case STARTED:
+                return JobState.ACTIVE;
+            case COMPLETE:
+                int exitStatus = getExitStatus(content);
+                if (exitStatus == 0) {
+                    return JobState.COMPLETE;
+                } else {
+                    log.info("[EJM]: Job returns with Exit Status = " + exitStatus + "  , Marked as Failed");
+                    return JobState.FAILED;
+                }
+            case FAILED:
+                return JobState.FAILED;
+            case ABORTED:
+                return JobState.CANCELED;
+            default:
+                return JobState.UNKNOWN;
+
+        }
+    }
+
+    private int getExitStatus(String content) {
+        Pattern statusPattern = Pattern.compile(REGEX_EXIT_STATUS);
+        Matcher statusMatcher = statusPattern.matcher(content);
+        if (statusMatcher.find()) {
+            String group = statusMatcher.group(EXIT_STATUS);
+            if (group != null && !group.trim().isEmpty()) {
+                return Integer.valueOf(group.trim());
+            }
+        }
+        return -1;
+    }
+}
diff --git a/modules/job-monitor/src/main/resources/airavata-server.properties b/modules/job-monitor/src/main/resources/airavata-server.properties
new file mode 100644
index 0000000..b54b28c
--- /dev/null
+++ b/modules/job-monitor/src/main/resources/airavata-server.properties
@@ -0,0 +1,334 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+###########################################################################
+#
+#  This properties file provides configuration for all Airavata Services:
+#  API Server, Registry, Workflow Interpreter, GFac, Orchestrator
+#
+###########################################################################
+
+###########################################################################
+#  API Server Registry Configuration
+###########################################################################
+
+#for derby [AiravataJPARegistry]
+#registry.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#registry.jdbc.url=jdbc:derby://localhost:1527/experiment_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+registry.jdbc.driver=org.mariadb.jdbc.Driver
+registry.jdbc.url=jdbc:mariadb://149.165.168.248:3306/experiment_catalog
+registry.jdbc.user=eroma
+registry.jdbc.password=eroma123456
+#FIXME: Probably the following property should be removed.
+start.derby.server.mode=false
+validationQuery=SELECT 1 from CONFIGURATION
+cache.enable=false
+jpa.cache.size=-1
+#jpa.connection.properties=MaxActive=10,MaxIdle=5,MinIdle=2,MaxWait=60000,testWhileIdle=true,testOnBorrow=true
+enable.sharing=true
+
+# Properties for default user mode
+default.registry.user=default-admin
+default.registry.password=123456
+default.registry.password.hash.method=SHA
+default.registry.gateway=default
+super.tenant.gatewayId=default
+
+# Properties for cluster status monitoring
+# cluster status monitoring job repeat time in seconds
+cluster.status.monitoring.enable=false
+cluster.status.monitoring.repeat.time=18000
+
+###########################################################################
+#  Application Catalog DB Configuration
+###########################################################################
+#for derby [AiravataJPARegistry]
+#appcatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#appcatalog.jdbc.url=jdbc:derby://localhost:1527/app_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+appcatalog.jdbc.driver=org.mariadb.jdbc.Driver
+appcatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/app_catalog
+appcatalog.jdbc.user=eroma
+appcatalog.jdbc.password=eroma123456
+appcatalog.validationQuery=SELECT 1 from CONFIGURATION
+
+##########################################################################
+#  Replica Catalog DB Configuration
+###########################################################################
+#for derby [AiravataJPARegistry]
+#replicacatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#replicacatalog.jdbc.url=jdbc:derby://localhost:1527/replica_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+replicacatalog.jdbc.driver=org.mariadb.jdbc.Driver
+replicacatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/replica_catalog
+replicacatalog.jdbc.user=eroma
+replicacatalog.jdbc.password=eroma123456
+replicacatalog.validationQuery=SELECT 1 from CONFIGURATION
+
+###########################################################################
+#  Workflow Catalog DB Configuration
+###########################################################################
+#for derby [AiravataJPARegistry]
+#workflowcatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#workflowcatalog.jdbc.url=jdbc:derby://localhost:1527/workflow_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+workflowcatalog.jdbc.driver=org.mariadb.jdbc.Driver
+workflowcatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/workflow_catalog
+workflowcatalog.jdbc.user=eroma
+workflowcatalog.jdbc.password=eroma123456
+workflowcatalog.validationQuery=SELECT 1 from CONFIGURATION
+
+###########################################################################
+#  Sharing Catalog DB Configuration
+###########################################################################
+#for derby [AiravataJPARegistry]
+#sharingcatalog.jdbc.driver=org.apache.derby.jdbc.ClientDriver
+#sharingcatalog.jdbc.url=jdbc:derby://localhost:1527/sharing_catalog;create=true;user=airavata;password=airavata
+# MariaDB database configuration
+sharingcatalog.jdbc.driver=org.mariadb.jdbc.Driver
+sharingcatalog.jdbc.url=jdbc:mariadb://149.165.168.248:3306/sharing_catalog
+sharingcatalog.jdbc.user=eroma
+sharingcatalog.jdbc.password=eroma123456
+sharingcatalog.validationQuery=SELECT 1 from CONFIGURATION
+
+###########################################################################
+#  Sharing Registry Server Configuration
+###########################################################################
+sharing_server=org.apache.airavata.sharing.registry.server.SharingRegistryServer
+sharing.registry.server.host=192.168.99.102
+sharing.registry.server.port=7878
+
+###########################################################################
+#  User Profile MongoDB Configuration
+###########################################################################
+userprofile.mongodb.host=localhost
+userprofile.mongodb.port=27017
+
+
+###########################################################################
+#  Server module Configuration
+###########################################################################
+#credential store server should be started before API server
+#This property is obsolete with the new script files.
+#servers=credentialstore,apiserver,orchestrator
+
+
+###########################################################################
+#  API Server Configurations
+###########################################################################
+apiserver=org.apache.airavata.api.server.AiravataAPIServer
+apiserver.name=apiserver-node0
+apiserver.host=192.168.99.102
+apiserver.port=8930
+apiserver.min.threads=50
+
+###########################################################################
+#  Orchestrator Server Configurations
+###########################################################################
+orchestrator=org.apache.airavata.orchestrator.server.OrchestratorServer
+orchestrator.server.name=orchestrator-node0
+orchestrator.server.host=192.168.99.102
+orchestrator.server.port=8940
+orchestrator.server.min.threads=50
+job.validators=org.apache.airavata.orchestrator.core.validator.impl.BatchQueueValidator,org.apache.airavata.orchestrator.core.validator.impl.ExperimentStatusValidator
+submitter.interval=10000
+threadpool.size=10
+start.submitter=true
+embedded.mode=true
+enable.validation=true
+
+###########################################################################
+#  Registry Server Configurations
+###########################################################################
+regserver=org.apache.airavata.registry.api.service.RegistryAPIServer
+regserver.server.name=regserver-node0
+regserver.server.host=192.168.99.102
+regserver.server.port=8970
+regserver.server.min.threads=50
+
+###########################################################################
+#  GFac Server Configurations
+###########################################################################
+gfac=org.apache.airavata.gfac.server.GfacServer
+gfac.server.name=gfac-node0
+gfac.server.host=10.0.2.15
+gfac.server.port=8950
+gfac.thread.pool.size=50
+host.scheduler=org.apache.airavata.gfac.impl.DefaultHostScheduler
+
+
+
+###########################################################################
+# Airavata Workflow Interpreter Configurations
+###########################################################################
+workflowserver=org.apache.airavata.api.server.WorkflowServer
+enactment.thread.pool.size=10
+
+#to define custom workflow parser user following property
+#workflow.parser=org.apache.airavata.workflow.core.parser.AiravataWorkflowBuilder
+
+
+
+###########################################################################
+#  Job Scheduler can send informative email messages to you about the status of your job.
+# Specify a string which consists of either the single character "n" (no mail), or one or more
+#  of the characters "a" (send mail when job is aborted), "b" (send mail when job begins),
+# and "e" (send mail when job terminates).  The default is "a" if not specified.
+###########################################################################
+
+job.notification.enable=true
+#Provide comma separated email ids as a string if more than one
+job.notification.emailids=
+job.notification.flags=abe
+
+###########################################################################
+# Credential Store module Configuration
+###########################################################################
+credential.store.keystore.url=/Users/dimuthu/code/fork/airavata/modules/helix-spectator/src/main/resources/cred_store.jks
+credential.store.keystore.alias=seckey
+credential.store.keystore.password=credstore123
+credential.store.jdbc.url=jdbc:mariadb://149.165.168.248:3306/credential_store
+credential.store.jdbc.user=eroma
+credential.store.jdbc.password=eroma123456
+credential.store.jdbc.driver=org.mariadb.jdbc.Driver
+credential.store.server.host=192.168.99.102
+credential.store.server.port=8960
+credentialstore=org.apache.airavata.credential.store.server.CredentialStoreServer
+credential.stroe.jdbc.validationQuery=SELECT 1 from CONFIGURATION
+
+# these properties used by credential store email notifications
+email.server=smtp.googlemail.com
+email.server.port=465
+email.user=airavata
+email.password=xxx
+email.ssl=true
+email.from=airavata@apache.org
+
+# SSH PKI key pair or ssh password can be used for SSH based authentication.
+# If the user specifies both, key pair authentication gets the higher preference.
+
+################# ---------- For ssh key pair sshKeyAuthentication ------------------- ################
+#ssh.public.key=/path to public key for ssh
+#ssh.private.key=/path to private key file for ssh
+#ssh.keypass=passphrase for the private key
+#ssh.username=username for ssh connection
+## If you set "yes" for ssh.strict.hostKey.checking, then you must provide known hosts file path
+#ssh.strict.hostKey.checking=yes/no
+#ssh.known.hosts.file=/path to known hosts file
+### In case of password authentication.
+#ssh.password=Password for ssh connection
+
+################ ---------- BES Properties ------------------- ###############
+#bes.ca.cert.path=<location>/certificates/cacert.pem
+#bes.ca.key.path=<location>/certificates/cakey.pem
+#bes.ca.key.pass=passphrase
+
+###########################################################################
+# Monitoring module Configuration
+###########################################################################
+
+#This will be the primary monitoring tool which runs in Airavata; in the future there will be multiple monitoring
+#mechanisms and one will be able to start any of them
+monitors=org.apache.airavata.gfac.monitor.impl.pull.qstat.QstatMonitor,org.apache.airavata.gfac.monitor.impl.LocalJobMonitor
+
+#These properties will be used to enable email based monitoring
+email.based.monitor.host=imap.gmail.com
+email.based.monitor.address=ansibletestairavata@gmail.com
+email.based.monitor.password=ansibletestairavata123
+email.based.monitor.folder.name=INBOX
+# either imaps or pop3
+email.based.monitor.store.protocol=imaps
+#This property will be used to query the email server periodically. Value is in milliseconds (ms).
+email.based.monitoring.period=10000
+
+###########################################################################
+# AMQP Notification Configuration
+###########################################################################
+#for simple scenarios we can use the guest user
+#rabbitmq.broker.url=amqp://localhost:5672
+#for production scenarios, give the url as amqp://userName:password@hostName:portNumber/virtualHost; create the user and virtual host
+# and grant permissions, refer: http://blog.dtzq.com/2012/06/rabbitmq-users-and-virtual-hosts.html
+rabbitmq.broker.url=amqp://airavata:123456@192.168.99.102:5672/master
+rabbitmq.status.exchange.name=status_exchange
+rabbitmq.process.exchange.name=process_exchange
+rabbitmq.experiment.exchange.name=experiment_exchange
+durable.queue=false
+prefetch.count=200
+process.launch.queue.name=process.launch.queue
+experiment.launch..queue.name=experiment.launch.queue
+
+###########################################################################
+# Zookeeper Server Configuration
+###########################################################################
+embedded.zk=false
+zookeeper.server.connection=192.168.99.102:2181
+zookeeper.timeout=30000
+
+########################################################################
+## API Security Configuration
+########################################################################
+api.secured=false
+security.manager.class=org.apache.airavata.service.security.KeyCloakSecurityManager
+### TLS related configuration ####
+TLS.enabled=true
+TLS.api.server.port=9930
+TLS.client.timeout=10000
+#### keystore configuration ####
+keystore.path=/home/pga/master-deployment/keystores/airavata.jks
+keystore.password=password
+#### trust store configuration ####
+trust.store=/home/pga/master-deployment/keystores/client_truststore.jks
+trust.store.password=password
+#### authorization cache related configuration ####
+authz.cache.enabled=true
+authz.cache.manager.class=org.apache.airavata.service.security.authzcache.DefaultAuthzCacheManager
+in.memory.cache.size=1000
+
+# Kafka Logging related configuration
+isRunningOnAws=false
+kafka.broker.list=localhost:9092
+kafka.topic.prefix=local
+enable.kafka.logging=false
+
+###########################################################################
+# Profile Service Configuration
+###########################################################################
+profile.service.server.host=192.168.99.102
+profile.service.server.port=8962
+profile_service=org.apache.airavata.service.profile.server.ProfileServiceServer
+# MariaDB properties
+profile.service.jdbc.url=jdbc:mariadb://149.165.168.248:3306/profile_service
+profile.service.jdbc.user=eroma
+profile.service.jdbc.password=eroma123456
+profile.service.jdbc.driver=org.mariadb.jdbc.Driver
+profile.service.validationQuery=SELECT 1
+
+###########################################################################
+# Iam Admin services Configuration
+###########################################################################
+iam.server.url=https://192.168.99.102/auth
+iam.server.super.admin.username=admin
+iam.server.super.admin.password=123456
+
+###########################################################################
+# DB Event Manager Runner
+###########################################################################
+db_event_manager=org.apache.airavata.db.event.manager.DBEventManagerRunner
diff --git a/modules/job-monitor/src/main/resources/email-config.yaml b/modules/job-monitor/src/main/resources/email-config.yaml
new file mode 100644
index 0000000..9f0c38f
--- /dev/null
+++ b/modules/job-monitor/src/main/resources/email-config.yaml
@@ -0,0 +1,20 @@
+config:
+ resources:
+   - jobManagerType: PBS
+     emailParser: org.apache.airavata.job.monitor.parser.PBSEmailParser
+     resourceEmailAddresses:
+       - pbsconsult@sdsc.edu  # gordon
+       - adm@trident.bigred2.uits.iu.edu # Bigred2
+       - root <ad...@trident.bigred2.uits.iu.edu> # Bigred2
+       - root <ad...@scyld.localdomain> # alamo
+
+   - jobManagerType: SLURM
+     emailParser: org.apache.airavata.job.monitor.parser.SLURMEmailParser
+     resourceEmailAddresses:
+       - SDSC Admin <sl...@comet-fe3.sdsc.edu> # comet
+       - slurm@batch1.stampede.tacc.utexas.edu # stampede
+
+   - jobManagerType: UGE
+     emailParser: org.apache.airavata.job.monitor.parser.UGEEmailParser
+     resourceEmailAddresses:
+       - ls4.tacc.utexas.edu # Lonestar
\ No newline at end of file
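
The ResourceConfig bean introduced earlier maps onto one entry of the resources list above. Below is a minimal sketch of reading this file, assuming SnakeYAML is on the classpath; the monitor module's actual loader, and any conversion of jobManagerType into the ResourceJobManagerType enum, may differ.

    import org.yaml.snakeyaml.Yaml;

    import java.io.InputStream;
    import java.util.List;
    import java.util.Map;

    public class EmailConfigDemo {

        @SuppressWarnings("unchecked")
        public static void main(String[] args) {
            InputStream in = EmailConfigDemo.class.getClassLoader().getResourceAsStream("email-config.yaml");
            Map<String, Object> root = (Map<String, Object>) new Yaml().load(in);
            Map<String, Object> config = (Map<String, Object>) root.get("config");
            List<Map<String, Object>> resources = (List<Map<String, Object>>) config.get("resources");
            for (Map<String, Object> resource : resources) {
                // Each entry carries the keys shown in the YAML above.
                System.out.println(resource.get("jobManagerType") + " -> " + resource.get("emailParser")
                        + " for " + resource.get("resourceEmailAddresses"));
            }
        }
    }

Turning each map into a ResourceConfig instance is then a matter of calling the setters defined on that bean.
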
diff --git a/modules/job-monitor/src/main/resources/log4j.properties b/modules/job-monitor/src/main/resources/log4j.properties
new file mode 100644
index 0000000..5e31e3c
--- /dev/null
+++ b/modules/job-monitor/src/main/resources/log4j.properties
@@ -0,0 +1,9 @@
+# Set root logger level to DEBUG and its only appender to A1.
+log4j.rootLogger=INFO, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index bd99a2d..c8e72db 100644
--- a/pom.xml
+++ b/pom.xml
@@ -28,6 +28,9 @@
     <prerequisites>
         <maven>3.0</maven>
     </prerequisites>
+    <modules>
+        <module>modules/job-monitor</module>
+    </modules>
 
     <parent>
         <groupId>org.apache</groupId>

-- 
To stop receiving notification emails like this one, please contact
dimuthuupe@apache.org.

[airavata] 15/17: Moving helix-spectator module to airavata-helix module

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit a726a987afed70e0662c4f689c1e4a56a2837967
Author: dimuthu <di...@gmail.com>
AuthorDate: Wed Mar 7 14:02:47 2018 -0500

    Moving helix-spectator module to airavata-helix module
---
 modules/{ => airavata-helix}/helix-spectator/pom.xml                      | 0
 .../java/org/apache/airavata/helix/impl/controller/HelixController.java   | 0
 .../org/apache/airavata/helix/impl/participant/GlobalParticipant.java     | 0
 .../src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java   | 0
 .../src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java    | 0
 .../java/org/apache/airavata/helix/impl/task/TaskOnFailException.java     | 0
 .../org/apache/airavata/helix/impl/task/completing/CompletingTask.java    | 0
 .../main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java   | 0
 .../java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java | 0
 .../org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java | 0
 .../apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java    | 0
 .../airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java     | 0
 .../apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java | 0
 .../org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java | 0
 .../airavata/helix/impl/task/submission/LocalJobSubmissionTask.java       | 0
 .../airavata/helix/impl/task/submission/config/GroovyMapBuilder.java      | 0
 .../apache/airavata/helix/impl/task/submission/config/GroovyMapData.java  | 0
 .../org/apache/airavata/helix/impl/task/submission/config/JobFactory.java | 0
 .../helix/impl/task/submission/config/JobManagerConfiguration.java        | 0
 .../apache/airavata/helix/impl/task/submission/config/OutputParser.java   | 0
 .../apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java | 0
 .../org/apache/airavata/helix/impl/task/submission/config/Script.java     | 0
 .../org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java  | 0
 .../apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java | 0
 .../helix/impl/task/submission/config/app/ForkJobConfiguration.java       | 0
 .../apache/airavata/helix/impl/task/submission/config/app/JobUtil.java    | 0
 .../helix/impl/task/submission/config/app/LSFJobConfiguration.java        | 0
 .../helix/impl/task/submission/config/app/PBSJobConfiguration.java        | 0
 .../helix/impl/task/submission/config/app/SlurmJobConfiguration.java      | 0
 .../helix/impl/task/submission/config/app/UGEJobConfiguration.java        | 0
 .../submission/config/app/parser/AiravataCustomCommandOutputParser.java   | 0
 .../helix/impl/task/submission/config/app/parser/ForkOutputParser.java    | 0
 .../helix/impl/task/submission/config/app/parser/LSFOutputParser.java     | 0
 .../helix/impl/task/submission/config/app/parser/PBSOutputParser.java     | 0
 .../helix/impl/task/submission/config/app/parser/SlurmOutputParser.java   | 0
 .../helix/impl/task/submission/config/app/parser/UGEOutputParser.java     | 0
 .../java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java | 0
 .../java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java  | 0
 .../helix-spectator/src/main/resources/airavata-server.properties         | 0
 .../helix-spectator/src/main/resources/application.properties             | 0
 .../helix-spectator/src/main/resources/log4j.properties                   | 0
 41 files changed, 0 insertions(+), 0 deletions(-)

diff --git a/modules/helix-spectator/pom.xml b/modules/airavata-helix/helix-spectator/pom.xml
similarity index 100%
rename from modules/helix-spectator/pom.xml
rename to modules/airavata-helix/helix-spectator/pom.xml
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/completing/CompletingTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/completing/CompletingTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/completing/CompletingTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/completing/CompletingTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/env/EnvSetupTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/DataStagingTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/InputDataStagingTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/staging/OutputDataStagingTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/DefaultJobSubmissionTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/ForkJobSubmissionTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/JobSubmissionTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/LocalJobSubmissionTask.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/LocalJobSubmissionTask.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/LocalJobSubmissionTask.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/LocalJobSubmissionTask.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapBuilder.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapBuilder.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapBuilder.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapBuilder.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobFactory.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobManagerConfiguration.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobManagerConfiguration.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobManagerConfiguration.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/JobManagerConfiguration.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/OutputParser.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/OutputParser.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/OutputParser.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/OutputParser.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/RawCommandInfo.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/Script.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/Script.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/Script.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/Script.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/ScriptTag.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/SubmissionUtil.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/ForkJobConfiguration.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/ForkJobConfiguration.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/ForkJobConfiguration.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/ForkJobConfiguration.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/JobUtil.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/JobUtil.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/JobUtil.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/JobUtil.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/LSFJobConfiguration.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/LSFJobConfiguration.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/LSFJobConfiguration.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/LSFJobConfiguration.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/PBSJobConfiguration.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/PBSJobConfiguration.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/PBSJobConfiguration.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/PBSJobConfiguration.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/SlurmJobConfiguration.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/SlurmJobConfiguration.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/SlurmJobConfiguration.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/SlurmJobConfiguration.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/UGEJobConfiguration.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/UGEJobConfiguration.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/UGEJobConfiguration.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/UGEJobConfiguration.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/AiravataCustomCommandOutputParser.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/AiravataCustomCommandOutputParser.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/AiravataCustomCommandOutputParser.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/AiravataCustomCommandOutputParser.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/ForkOutputParser.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/ForkOutputParser.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/ForkOutputParser.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/ForkOutputParser.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/LSFOutputParser.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/LSFOutputParser.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/LSFOutputParser.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/LSFOutputParser.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/PBSOutputParser.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/PBSOutputParser.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/PBSOutputParser.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/PBSOutputParser.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/SlurmOutputParser.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/SlurmOutputParser.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/SlurmOutputParser.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/SlurmOutputParser.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/UGEOutputParser.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/UGEOutputParser.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/UGEOutputParser.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/app/parser/UGEOutputParser.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java b/modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
similarity index 100%
rename from modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
rename to modules/airavata-helix/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
diff --git a/modules/helix-spectator/src/main/resources/airavata-server.properties b/modules/airavata-helix/helix-spectator/src/main/resources/airavata-server.properties
similarity index 100%
rename from modules/helix-spectator/src/main/resources/airavata-server.properties
rename to modules/airavata-helix/helix-spectator/src/main/resources/airavata-server.properties
diff --git a/modules/helix-spectator/src/main/resources/application.properties b/modules/airavata-helix/helix-spectator/src/main/resources/application.properties
similarity index 100%
rename from modules/helix-spectator/src/main/resources/application.properties
rename to modules/airavata-helix/helix-spectator/src/main/resources/application.properties
diff --git a/modules/helix-spectator/src/main/resources/log4j.properties b/modules/airavata-helix/helix-spectator/src/main/resources/log4j.properties
similarity index 100%
rename from modules/helix-spectator/src/main/resources/log4j.properties
rename to modules/airavata-helix/helix-spectator/src/main/resources/log4j.properties

-- 
To stop receiving notification emails like this one, please contact
dimuthuupe@apache.org.

[airavata] 04/17: Fixing env setup task

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit cb54e4df2eb5ae453290a5d29cd3c0a8033c993d
Author: dimuthu <di...@gmail.com>
AuthorDate: Tue Feb 27 11:04:32 2018 -0500

    Fixing env setup task
---
 .../apache/airavata/helix/core/util/TaskUtil.java  | 19 ++++----
 .../airavata/helix/impl/task/AiravataTask.java     | 10 ++--
 .../airavata/helix/impl/task/EnvSetupTask.java     | 37 ++++-----------
 .../helix/impl/workflow/SimpleWorkflow.java        | 54 ++++++++++++++++++++--
 4 files changed, 75 insertions(+), 45 deletions(-)

diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/TaskUtil.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/TaskUtil.java
index d0f1ab6..218bd94 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/TaskUtil.java
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/util/TaskUtil.java
@@ -19,15 +19,18 @@ import java.util.Map;
  */
 public class TaskUtil {
 
-    public static <T extends AbstractTask> List<OutPort> getOutPortsOfTask(T task) throws IllegalAccessException {
-        Field[] fields = task.getClass().getDeclaredFields();
+    public static <T extends AbstractTask> List<OutPort> getOutPortsOfTask(T taskObj) throws IllegalAccessException {
+
         List<OutPort> outPorts = new ArrayList<>();
-        for (Field field : fields) {
-            TaskOutPort outPortAnnotation = field.getAnnotation(TaskOutPort.class);
-            if (outPortAnnotation != null) {
-                field.setAccessible(true);
-                OutPort outPort = (OutPort) field.get(task);
-                outPorts.add(outPort);
+        for (Class<?> c = taskObj.getClass(); c != null; c = c.getSuperclass()) {
+            Field[] fields = c.getDeclaredFields();
+            for (Field field : fields) {
+                TaskOutPort outPortAnnotation = field.getAnnotation(TaskOutPort.class);
+                if (outPortAnnotation != null) {
+                    field.setAccessible(true);
+                    OutPort outPort = (OutPort) field.get(taskObj);
+                    outPorts.add(outPort);
+                }
             }
         }
         return outPorts;
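
The superclass walk added above matters because Class.getDeclaredFields() only returns fields declared directly on the class itself, so annotated out-ports inherited from AiravataTask (such as the nextTask port introduced in this commit) were previously missed. A minimal, self-contained sketch of the idea, using hypothetical stand-in types rather than the real TaskOutPort annotation:

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.reflect.Field;
    import java.util.ArrayList;
    import java.util.List;

    public class HierarchyScanDemo {

        @Retention(RetentionPolicy.RUNTIME)
        @interface Port { }                                    // hypothetical stand-in for TaskOutPort

        static class Base { @Port Object basePort; }           // e.g. the port declared on the parent task
        static class Child extends Base { @Port Object childPort; }

        public static void main(String[] args) {
            List<String> found = new ArrayList<>();
            // Walk the hierarchy the same way the updated getOutPortsOfTask does.
            for (Class<?> c = Child.class; c != null; c = c.getSuperclass()) {
                for (Field field : c.getDeclaredFields()) {
                    if (field.getAnnotation(Port.class) != null) {
                        found.add(field.getName());
                    }
                }
            }
            System.out.println(found); // [childPort, basePort] - the inherited port is now picked up
        }
    }
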
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
index 315c07c..26361d2 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/AiravataTask.java
@@ -51,14 +51,13 @@ public abstract class AiravataTask extends AbstractTask {
     @TaskParam(name = "gatewayId")
     private String gatewayId;
 
-    @TaskOutPort(name = "Success Port")
-    private OutPort onSuccess;
-
+    @TaskOutPort(name = "Next Task")
+    private OutPort nextTask;
 
     protected TaskResult onSuccess(String message) {
         String successMessage = "Task " + getTaskId() + " completed." + message != null ? " Message : " + message : "";
         logger.info(successMessage);
-        return onSuccess.invoke(new TaskResult(TaskResult.Status.COMPLETED, message));
+        return nextTask.invoke(new TaskResult(TaskResult.Status.COMPLETED, message));
     }
 
     protected TaskResult onFail(String reason, boolean fatal, Throwable error) {
@@ -178,4 +177,7 @@ public abstract class AiravataTask extends AbstractTask {
         return processModel;
     }
 
+    public void setNextTask(OutPort nextTask) {
+        this.nextTask = nextTask;
+    }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
index cabc014..eafa53d 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
@@ -1,23 +1,18 @@
 package org.apache.airavata.helix.impl.task;
 
 import org.apache.airavata.agents.api.AgentAdaptor;
-import org.apache.airavata.helix.core.OutPort;
 import org.apache.airavata.helix.task.api.TaskHelper;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
-import org.apache.airavata.helix.task.api.annotation.TaskOutPort;
-import org.apache.airavata.helix.task.api.annotation.TaskParam;
 import org.apache.airavata.model.status.TaskState;
 import org.apache.airavata.registry.cpi.RegistryException;
 import org.apache.helix.task.TaskResult;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
 
 @TaskDef(name = "Environment Setup Task")
 public class EnvSetupTask extends AiravataTask {
 
-    @TaskParam(name = "Working Directory")
-    private String workingDirectory;
-
-    @TaskOutPort(name = "Success Out Port")
-    private OutPort successPort;
+    private static final Logger logger = LogManager.getLogger(EnvSetupTask.class);
 
     @Override
     public TaskResult onRun(TaskHelper taskHelper) {
@@ -30,18 +25,19 @@ public class EnvSetupTask extends AiravataTask {
                     getTaskContext().getComputeResourceCredentialToken(),
                     getTaskContext().getComputeResourceLoginUserName());
 
-            adaptor.createDirectory(workingDirectory);
+            logger.info("Creating directory " + getTaskContext().getWorkingDir() + " on compute resource " + getTaskContext().getComputeResourceId());
+            adaptor.createDirectory(getTaskContext().getWorkingDir());
             publishTaskState(TaskState.COMPLETED);
-            return successPort.invoke(new TaskResult(TaskResult.Status.COMPLETED, "Successfully completed"));
+            return onSuccess("Successfully completed");
         } catch (Exception e) {
             try {
                 publishTaskState(TaskState.FAILED);
             } catch (RegistryException e1) {
-                publishErrors(e1);
+                logger.error("Task failed to publish task status", e1);
+
                 // ignore silently
             }
-            publishErrors(e);
-            return new TaskResult(TaskResult.Status.FAILED, "Failed the task");
+            return onFail("Failed to setup environment of task " + getTaskId(), true, e);
         }
     }
 
@@ -50,19 +46,4 @@ public class EnvSetupTask extends AiravataTask {
 
     }
 
-    public String getWorkingDirectory() {
-        return workingDirectory;
-    }
-
-    public void setWorkingDirectory(String workingDirectory) {
-        this.workingDirectory = workingDirectory;
-    }
-
-    public OutPort getSuccessPort() {
-        return successPort;
-    }
-
-    public void setSuccessPort(OutPort successPort) {
-        this.successPort = successPort;
-    }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
index 397ff45..99db2c4 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/SimpleWorkflow.java
@@ -1,22 +1,66 @@
 package org.apache.airavata.helix.impl.workflow;
 
 import org.apache.airavata.helix.core.AbstractTask;
+import org.apache.airavata.helix.core.OutPort;
+import org.apache.airavata.helix.impl.task.AiravataTask;
 import org.apache.airavata.helix.impl.task.EnvSetupTask;
 import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;
 import org.apache.airavata.helix.workflow.WorkflowManager;
+import org.apache.airavata.model.experiment.ExperimentModel;
+import org.apache.airavata.model.process.ProcessModel;
+import org.apache.airavata.model.task.TaskModel;
+import org.apache.airavata.model.task.TaskTypes;
+import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
+import org.apache.airavata.registry.cpi.AppCatalog;
+import org.apache.airavata.registry.cpi.ExperimentCatalog;
+import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Optional;
 import java.util.UUID;
+import java.util.stream.Collectors;
 
 public class SimpleWorkflow {
 
     public static void main(String[] args) throws Exception {
 
-        EnvSetupTask envSetupTask = new EnvSetupTask();
-        envSetupTask.setWorkingDirectory("/tmp/a");
+        String processId = "PROCESS_438a87cc-2dec-4edc-bfeb-31128df91bb6";
+        AppCatalog appCatalog = RegistryFactory.getAppCatalog();
+        ExperimentCatalog experimentCatalog = RegistryFactory.getDefaultExpCatalog();
 
-        DefaultJobSubmissionTask defaultJobSubmissionTask = new DefaultJobSubmissionTask();
+        ProcessModel processModel = (ProcessModel) experimentCatalog.get(ExperimentCatalogModelType.PROCESS, processId);
+        ExperimentModel experimentModel = (ExperimentModel) experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT, processModel.getExperimentId());
+        String taskDag = processModel.getTaskDag();
+        List<TaskModel> taskList = processModel.getTasks();
+
+        String[] taskIds = taskDag.split(",");
+        final List<AiravataTask> allTasks = new ArrayList<>();
+
+        for (String taskId : taskIds) {
+            Optional<TaskModel> model = taskList.stream().filter(taskModel -> taskModel.getTaskId().equals(taskId)).findFirst();
+            model.ifPresent(taskModel -> {
+                AiravataTask airavataTask = null;
+                if (taskModel.getTaskType() == TaskTypes.ENV_SETUP) {
+                    airavataTask = new EnvSetupTask();
+                } else if (taskModel.getTaskType() == TaskTypes.JOB_SUBMISSION) {
+                    airavataTask = new DefaultJobSubmissionTask();
+                }
+
+                if (airavataTask != null) {
+                    airavataTask.setGatewayId(experimentModel.getGatewayId());
+                    airavataTask.setExperimentId(experimentModel.getExperimentId());
+                    airavataTask.setProcessId(processModel.getProcessId());
+                    airavataTask.setTaskId(taskModel.getTaskId());
+                    if (allTasks.size() > 0) {
+                        allTasks.get(allTasks.size() -1).setNextTask(new OutPort(airavataTask.getTaskId(), airavataTask));
+                    }
+                    allTasks.add(airavataTask);
+                }
+            });
+        }
+
+/*        DefaultJobSubmissionTask defaultJobSubmissionTask = new DefaultJobSubmissionTask();
         defaultJobSubmissionTask.setGatewayId("default");
         defaultJobSubmissionTask.setExperimentId("Clone_of_Mothur-Test1_0c9f627e-2c32-403e-a28a-2a8b10c21c1a");
         defaultJobSubmissionTask.setProcessId("PROCESS_438a87cc-2dec-4edc-bfeb-31128df91bb6");
@@ -24,8 +68,8 @@ public class SimpleWorkflow {
 
         List<AbstractTask> tasks = new ArrayList<>();
         tasks.add(defaultJobSubmissionTask);
-
+*/
         WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster", "wm-22", "localhost:2199");
-        workflowManager.launchWorkflow(UUID.randomUUID().toString(), tasks, true);
+        workflowManager.launchWorkflow(UUID.randomUUID().toString(), allTasks.stream().map(t -> (AiravataTask)t).collect(Collectors.toList()), true);
     }
 }
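
SimpleWorkflow above now derives the task chain from the stored task DAG instead of hard-coded tasks: it splits the comma-separated DAG string, instantiates a task per entry, and points each task's out-port at the task that follows it. A rough stand-alone sketch of that chaining idea, with a hypothetical Task class standing in for AiravataTask/OutPort (the DAG string below is invented):

    import java.util.ArrayList;
    import java.util.List;

    public class ChainSketch {

        // Hypothetical stand-in for an AiravataTask with a single "next" out-port.
        static class Task {
            final String id;
            Task next;                        // analogous to setNextTask(new OutPort(...))
            Task(String id) { this.id = id; }
            void run() {
                System.out.println("Running " + id);
                if (next != null) next.run(); // analogous to nextTask.invoke(...)
            }
        }

        public static void main(String[] args) {
            // Comma-separated task DAG, as returned by ProcessModel.getTaskDag() in the diff above.
            String taskDag = "ENV_SETUP_TASK,JOB_SUBMISSION_TASK";

            List<Task> allTasks = new ArrayList<>();
            for (String taskId : taskDag.split(",")) {
                Task task = new Task(taskId);
                if (!allTasks.isEmpty()) {
                    allTasks.get(allTasks.size() - 1).next = task; // link the previous task to this one
                }
                allTasks.add(task);
            }

            allTasks.get(0).run();
        }
    }
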


[airavata] 03/17: Stabalizing DefaultJobSubmission Task

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit b199bc2090ff9bc8b9c5827adeedcbfbfe181cc0
Author: dimuthu <di...@gmail.com>
AuthorDate: Mon Feb 26 01:11:29 2018 -0500

    Stabalizing DefaultJobSubmission Task
---
 .../airavata-helix/agent-impl/ssh-agent/pom.xml    |  21 ++--
 .../airavata/helix/agent/ssh/SshAgentAdaptor.java  | 132 ++++++++++++---------
 .../helix/agent/ssh/StandardOutReader.java         |  80 ++++---------
 .../helix/task/api/support/AdaptorSupport.java     |  34 +-----
 modules/airavata-helix/task-core/pom.xml           |   5 +
 .../helix/core/support/AdaptorSupportImpl.java     |  19 +--
 .../airavata/helix/impl/task/EnvSetupTask.java     |   4 +-
 .../airavata/helix/impl/task/TaskContext.java      |   9 +-
 .../impl/task/submission/GroovyMapBuilder.java     |  74 +++++++++++-
 .../helix/impl/task/submission/GroovyMapData.java  |  10 +-
 .../submission/task/DefaultJobSubmissionTask.java  |   8 +-
 .../submission/task/ForkJobSubmissionTask.java     |   4 +-
 .../task/submission/task/JobSubmissionTask.java    |  13 +-
 .../submission/task/LocalJobSubmissionTask.java    |   6 +-
 .../src/main/resources/airavata-server.properties  |   4 +-
 .../src/main/resources/log4j.properties            |   2 +
 16 files changed, 242 insertions(+), 183 deletions(-)

diff --git a/modules/airavata-helix/agent-impl/ssh-agent/pom.xml b/modules/airavata-helix/agent-impl/ssh-agent/pom.xml
index 44cf919..bc78971 100644
--- a/modules/airavata-helix/agent-impl/ssh-agent/pom.xml
+++ b/modules/airavata-helix/agent-impl/ssh-agent/pom.xml
@@ -3,9 +3,10 @@
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
-        <artifactId>agent-impl</artifactId>
-        <groupId>org.apache</groupId>
-        <version>1.0-SNAPSHOT</version>
+        <artifactId>airavata-helix</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -33,19 +34,15 @@
             <artifactId>airavata-credential-store</artifactId>
             <version>0.17-SNAPSHOT</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>agent-api</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
     </dependencies>
 
     <build>
         <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>3.5.1</version>
-                <configuration>
-                    <source>${java.version}</source>
-                    <target>${java.version}</target>
-                </configuration>
-            </plugin>
 
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
index 19b429c..ef8d580 100644
--- a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/SshAgentAdaptor.java
@@ -13,7 +13,6 @@ import org.apache.airavata.model.appcatalog.computeresource.*;
 import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.AppCatalogException;
-import org.apache.airavata.registry.cpi.ComputeResource;
 
 import java.io.*;
 import java.util.Arrays;
@@ -130,47 +129,64 @@ public class SshAgentAdaptor implements AgentAdaptor {
 
     public CommandOutput executeCommand(String command, String workingDirectory) throws AgentException {
         StandardOutReader commandOutput = new StandardOutReader();
+        ChannelExec channelExec = null;
         try {
-            ChannelExec channelExec = ((ChannelExec) session.openChannel("exec"));
+            channelExec = ((ChannelExec) session.openChannel("exec"));
             channelExec.setCommand(command);
             channelExec.setInputStream(null);
-            channelExec.setErrStream(commandOutput.getStandardError());
+            InputStream out = channelExec.getInputStream();
+            InputStream err = channelExec.getErrStream();
             channelExec.connect();
-            commandOutput.onOutput(channelExec);
+
+            commandOutput.setExitCode(channelExec.getExitStatus());
+            commandOutput.readStdOutFromStream(out);
+            commandOutput.readStdErrFromStream(err);
             return commandOutput;
         } catch (JSchException e) {
+            e.printStackTrace();
+            throw new AgentException(e);
+        } catch (IOException e) {
+            e.printStackTrace();
             throw new AgentException(e);
+        } finally {
+            if (channelExec != null) {
+                channelExec.disconnect();
+            }
         }
     }
 
     public void createDirectory(String path) throws AgentException {
+        String command = "mkdir -p " + path;
+        ChannelExec channelExec = null;
         try {
-            String command = "mkdir -p " + path;
-            Channel channel = session.openChannel("exec");
+            channelExec = (ChannelExec)session.openChannel("exec");
             StandardOutReader stdOutReader = new StandardOutReader();
 
-            ((ChannelExec) channel).setCommand(command);
+            channelExec.setCommand(command);
+            InputStream out = channelExec.getInputStream();
+            InputStream err = channelExec.getErrStream();
+            channelExec.connect();
+
+            stdOutReader.readStdOutFromStream(out);
+            stdOutReader.readStdErrFromStream(err);
 
-            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-            try {
-                channel.connect();
-            } catch (JSchException e) {
 
-                channel.disconnect();
-                System.out.println("Unable to retrieve command output. Command - " + command +
-                        " on server - " + session.getHost() + ":" + session.getPort() +
-                        " connecting user name - "
-                        + session.getUserName());
-                throw new AgentException(e);
-            }
-            stdOutReader.onOutput(channel);
-            if (stdOutReader.getStdErrorString().contains("mkdir:")) {
-                throw new AgentException(stdOutReader.getStdErrorString());
+            if (stdOutReader.getStdError() != null && stdOutReader.getStdError().contains("mkdir:")) {
+                throw new AgentException(stdOutReader.getStdError());
             }
-
-            channel.disconnect();
         } catch (JSchException e) {
+            System.out.println("Unable to retrieve command output. Command - " + command +
+                    " on server - " + session.getHost() + ":" + session.getPort() +
+                    " connecting user name - "
+                    + session.getUserName());
+            throw new AgentException(e);
+        } catch (IOException e) {
+            e.printStackTrace();
             throw new AgentException(e);
+        } finally {
+            if (channelExec != null) {
+                channelExec.disconnect();
+            }
         }
     }
 
@@ -182,20 +198,22 @@ public class SshAgentAdaptor implements AgentAdaptor {
         }
         boolean ptimestamp = true;
 
+        ChannelExec channelExec = null;
         try {
             // exec 'scp -t rfile' remotely
             String command = "scp " + (ptimestamp ? "-p" : "") + " -t " + remoteFile;
-            Channel channel = session.openChannel("exec");
+            channelExec = (ChannelExec)session.openChannel("exec");
 
             StandardOutReader stdOutReader = new StandardOutReader();
-            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-            ((ChannelExec) channel).setCommand(command);
+            //channelExec.setErrStream(stdOutReader.getStandardError());
+            channelExec.setCommand(command);
 
             // get I/O streams for remote scp
-            OutputStream out = channel.getOutputStream();
-            InputStream in = channel.getInputStream();
+            OutputStream out = channelExec.getOutputStream();
+            InputStream in = channelExec.getInputStream();
+            InputStream err = channelExec.getErrStream();
 
-            channel.connect();
+            channelExec.connect();
 
             if (checkAck(in) != 0) {
                 String error = "Error Reading input Stream";
@@ -255,12 +273,10 @@ public class SshAgentAdaptor implements AgentAdaptor {
                 throw new AgentException(error);
             }
             out.close();
-            stdOutReader.onOutput(channel);
-
+            stdOutReader.readStdErrFromStream(err);
 
-            channel.disconnect();
-            if (stdOutReader.getStdErrorString().contains("scp:")) {
-                throw new AgentException(stdOutReader.getStdErrorString());
+            if (stdOutReader.getStdError().contains("scp:")) {
+                throw new AgentException(stdOutReader.getStdError());
             }
             //since remote file is always a file  we just return the file
             //return remoteFile;
@@ -273,43 +289,47 @@ public class SshAgentAdaptor implements AgentAdaptor {
         } catch (IOException e) {
             e.printStackTrace();
             throw new AgentException(e);
+        } finally {
+            if (channelExec != null) {
+                channelExec.disconnect();
+            }
         }
     }
 
     @Override
     public List<String> listDirectory(String path) throws AgentException {
-
+        String command = "ls " + path;
+        ChannelExec channelExec = null;
         try {
-            String command = "ls " + path;
-            Channel channel = session.openChannel("exec");
+            channelExec = (ChannelExec)session.openChannel("exec");
             StandardOutReader stdOutReader = new StandardOutReader();
 
-            ((ChannelExec) channel).setCommand(command);
+            channelExec.setCommand(command);
 
+            InputStream out = channelExec.getInputStream();
+            InputStream err = channelExec.getErrStream();
 
-            ((ChannelExec) channel).setErrStream(stdOutReader.getStandardError());
-            try {
-                channel.connect();
-            } catch (JSchException e) {
-
-                channel.disconnect();
-//            session.disconnect();
+            channelExec.connect();
 
-                throw new AgentException("Unable to retrieve command output. Command - " + command +
-                        " on server - " + session.getHost() + ":" + session.getPort() +
-                        " connecting user name - "
-                        + session.getUserName(), e);
-            }
-            stdOutReader.onOutput(channel);
-            stdOutReader.getStdOutputString();
-            if (stdOutReader.getStdErrorString().contains("ls:")) {
-                throw new AgentException(stdOutReader.getStdErrorString());
+            stdOutReader.readStdOutFromStream(out);
+            stdOutReader.readStdErrFromStream(err);
+            if (stdOutReader.getStdError().contains("ls:")) {
+                throw new AgentException(stdOutReader.getStdError());
             }
-            channel.disconnect();
-            return Arrays.asList(stdOutReader.getStdOutputString().split("\n"));
+            return Arrays.asList(stdOutReader.getStdOut().split("\n"));
 
         } catch (JSchException e) {
+            throw new AgentException("Unable to retrieve command output. Command - " + command +
+                    " on server - " + session.getHost() + ":" + session.getPort() +
+                    " connecting user name - "
+                    + session.getUserName(), e);
+        } catch (IOException e) {
+            e.printStackTrace();
             throw new AgentException(e);
+        } finally {
+            if (channelExec != null) {
+                channelExec.disconnect();
+            }
         }
     }
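
The adaptor methods above all follow the same JSch exec-channel sequence: open an exec channel, set the command, grab the stdout/stderr streams, connect, read, and disconnect in a finally block. A condensed sketch of that sequence against the JSch API; the user, host, key path and command are placeholders, and the exit status is read only after the channel has closed (JSch reports -1 until then):

    import com.jcraft.jsch.ChannelExec;
    import com.jcraft.jsch.JSch;
    import com.jcraft.jsch.Session;

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class ExecSketch {

        public static void main(String[] args) throws Exception {
            JSch jsch = new JSch();
            jsch.addIdentity("/path/to/private_key");                   // placeholder key
            Session session = jsch.getSession("user", "host.example.org", 22);
            session.setConfig("StrictHostKeyChecking", "no");
            session.connect();
            try {
                System.out.println(exec(session, "ls /tmp"));
            } finally {
                session.disconnect();
            }
        }

        static String exec(Session session, String command) throws Exception {
            ChannelExec channel = null;
            try {
                channel = (ChannelExec) session.openChannel("exec");
                channel.setCommand(command);
                channel.setInputStream(null);
                InputStream out = channel.getInputStream();             // obtain streams before connect()
                InputStream err = channel.getErrStream();
                channel.connect();

                String stdout = drain(out);
                String stderr = drain(err);

                while (!channel.isClosed()) {                           // wait so getExitStatus() is valid
                    Thread.sleep(50);
                }
                System.out.println("exit=" + channel.getExitStatus() + (stderr.isEmpty() ? "" : ", stderr=" + stderr));
                return stdout;
            } finally {
                if (channel != null) {
                    channel.disconnect();
                }
            }
        }

        private static String drain(InputStream is) throws IOException {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            byte[] tmp = new byte[1024];
            int n;
            while ((n = is.read(tmp)) != -1) {
                buffer.write(tmp, 0, n);
            }
            return buffer.toString("UTF-8");
        }
    }
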
 
diff --git a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/StandardOutReader.java b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/StandardOutReader.java
index 49c036e..94ba566 100644
--- a/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/StandardOutReader.java
+++ b/modules/airavata-helix/agent-impl/ssh-agent/src/main/java/org/apache/airavata/helix/agent/ssh/StandardOutReader.java
@@ -2,11 +2,9 @@ package org.apache.airavata.helix.agent.ssh;
 
 import com.jcraft.jsch.Channel;
 import org.apache.airavata.agents.api.CommandOutput;
+import org.apache.commons.io.IOUtils;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
+import java.io.*;
 
 /**
  * TODO: Class level comments please
@@ -16,68 +14,38 @@ import java.io.OutputStream;
  */
 public class StandardOutReader implements CommandOutput {
 
-    // Todo improve this. We need to direct access of std out and exit code
+    private String stdOut;
+    private String stdError;
+    private Integer exitCode;
 
-    String stdOutputString = null;
-    ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
-    private int exitCode;
-
-    public void onOutput(Channel channel) {
-        try {
-            StringBuffer pbsOutput = new StringBuffer("");
-            InputStream inputStream =  channel.getInputStream();
-            byte[] tmp = new byte[1024];
-            do {
-                while (inputStream.available() > 0) {
-                    int i = inputStream.read(tmp, 0, 1024);
-                    if (i < 0) break;
-                    pbsOutput.append(new String(tmp, 0, i));
-                }
-            } while (!channel.isClosed()) ;
-            String output = pbsOutput.toString();
-            this.setStdOutputString(output);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-    public void exitCode(int code) {
-        System.out.println("Program exit code - " + code);
-        this.exitCode = code;
-    }
-
-    public int getExitCode() {
-        return exitCode;
-    }
-
-    public String getStdOutputString() {
-        return stdOutputString;
-    }
-
-    public void setStdOutputString(String stdOutputString) {
-        this.stdOutputString = stdOutputString;
+    @Override
+    public String getStdOut() {
+        return this.stdOut;
     }
 
-    public String getStdErrorString() {
-        return errorStream.toString();
+    @Override
+    public String getStdError() {
+        return this.stdError;
     }
 
-    public OutputStream getStandardError() {
-        return errorStream;
+    @Override
+    public Integer getExitCode() {
+        return this.exitCode;
     }
 
-    @Override
-    public String getStdOut() {
-        return null;
+    public void readStdOutFromStream(InputStream is) throws IOException {
+        StringWriter writer = new StringWriter();
+        IOUtils.copy(is, writer, "UTF-8");
+        this.stdOut = writer.toString();
     }
 
-    @Override
-    public String getStdError() {
-        return null;
+    public void readStdErrFromStream(InputStream is) throws IOException {
+        StringWriter writer = new StringWriter();
+        IOUtils.copy(is, writer, "UTF-8");
+        this.stdError = writer.toString();
     }
 
-    @Override
-    public String getExitCommand() {
-        return null;
+    public void setExitCode(Integer exitCode) {
+        this.exitCode = exitCode;
     }
 }
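
StandardOutReader above now simply buffers each stream into a String via commons-io. A tiny usage sketch of that helper pattern (assumes commons-io on the classpath, which the diff's IOUtils import implies; the sample input is fabricated):

    import org.apache.commons.io.IOUtils;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.StringWriter;

    public class StreamReadSketch {

        // Same shape as readStdOutFromStream/readStdErrFromStream: copy the stream into a StringWriter.
        static String drain(InputStream is) throws IOException {
            StringWriter writer = new StringWriter();
            IOUtils.copy(is, writer, "UTF-8");
            return writer.toString();
        }

        public static void main(String[] args) throws IOException {
            InputStream fake = new ByteArrayInputStream("Submitted batch job 4242\n".getBytes("UTF-8"));
            System.out.println(drain(fake));
        }
    }
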
diff --git a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
index 3e24aaa..4b6e11e 100644
--- a/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
+++ b/modules/airavata-helix/task-api/src/main/java/org/apache/airavata/helix/task/api/support/AdaptorSupport.java
@@ -15,38 +15,6 @@ import java.io.File;
 public interface AdaptorSupport {
     public void initializeAdaptor();
 
-    public AgentAdaptor fetchAdaptor(String computeResource, String protocol, String authToken) throws Exception;
+    public AgentAdaptor fetchAdaptor(String gatewayId, String computeResource, String protocol, String authToken, String userId) throws Exception;
 
-
-    /**
-     *
-     * @param command
-     * @param workingDirectory
-     * @param computeResourceId
-     * @param protocol
-     * @param authToken
-     * @throws Exception
-     */
-    public CommandOutput executeCommand(String command, String workingDirectory, String computeResourceId, String protocol, String authToken) throws Exception;
-
-    /**
-     *
-     * @param path
-     * @param computeResourceId
-     * @param protocol
-     * @param authToken
-     * @throws Exception
-     */
-    public void createDirectory(String path, String computeResourceId, String protocol, String authToken) throws Exception;
-
-    /**
-     *
-     * @param sourceFile
-     * @param destinationFile
-     * @param computeResourceId
-     * @param protocol
-     * @param authToken
-     * @throws Exception
-     */
-    public void copyFile(String sourceFile, String destinationFile, String computeResourceId, String protocol, String authToken) throws Exception;
 }
diff --git a/modules/airavata-helix/task-core/pom.xml b/modules/airavata-helix/task-core/pom.xml
index df72dac..bf860f8 100644
--- a/modules/airavata-helix/task-core/pom.xml
+++ b/modules/airavata-helix/task-core/pom.xml
@@ -28,6 +28,11 @@
             <artifactId>agent-api</artifactId>
             <version>0.17-SNAPSHOT</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>ssh-agent</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
     </dependencies>
 
     <!--<build>
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
index 87a1e17..a98b8f0 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/support/AdaptorSupportImpl.java
@@ -1,6 +1,7 @@
 package org.apache.airavata.helix.core.support;
 
 import org.apache.airavata.agents.api.*;
+import org.apache.airavata.helix.agent.ssh.SshAgentAdaptor;
 import org.apache.airavata.helix.task.api.support.AdaptorSupport;
 
 import java.io.File;
@@ -29,19 +30,9 @@ public class AdaptorSupportImpl implements AdaptorSupport {
     public void initializeAdaptor() {
     }
 
-    public CommandOutput executeCommand(String command, String workingDirectory, String computeResourceId, String protocol, String authToken) throws AgentException {
-        return fetchAdaptor(computeResourceId, protocol, authToken).executeCommand(command, workingDirectory);
-    }
-
-    public void createDirectory(String path, String computeResourceId, String protocol, String authToken) throws AgentException {
-        fetchAdaptor(computeResourceId, protocol, authToken).createDirectory(path);
-    }
-
-    public void copyFile(String sourceFile, String destinationFile, String computeResourceId, String protocol, String authToken) throws AgentException {
-        fetchAdaptor(computeResourceId, protocol, authToken).copyFile(sourceFile, destinationFile);
-    }
-
-    public AgentAdaptor fetchAdaptor(String computeResource, String protocol, String authToken) throws AgentException {
-         return agentStore.fetchAdaptor(computeResource, protocol, authToken);
+    public AgentAdaptor fetchAdaptor(String gatewayId, String computeResource, String protocol, String authToken, String userId) throws AgentException {
+        SshAgentAdaptor agentAdaptor = new SshAgentAdaptor();
+        agentAdaptor.init(computeResource, gatewayId, userId, authToken);
+        return agentAdaptor;
     }
 }
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
index f079b9f..cabc014 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/EnvSetupTask.java
@@ -24,9 +24,11 @@ public class EnvSetupTask extends AiravataTask {
         try {
             publishTaskState(TaskState.EXECUTING);
             AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                    getTaskContext().getGatewayId(),
                     getTaskContext().getComputeResourceId(),
                     getTaskContext().getJobSubmissionProtocol().name(),
-                    getTaskContext().getComputeResourceCredentialToken());
+                    getTaskContext().getComputeResourceCredentialToken(),
+                    getTaskContext().getComputeResourceLoginUserName());
 
             adaptor.createDirectory(workingDirectory);
             publishTaskState(TaskState.COMPLETED);
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
index 7de738e..f33d8a1 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskContext.java
@@ -85,7 +85,7 @@ public class TaskContext {
      * Note: process context properties use a lazy loading approach. At runtime you will see some properties as null
      * unless you have accessed them previously. Once a property is accessed through the API, it will be set to the correct value.
      */
-    private TaskContext(String taskId, String processId, String gatewayId) {
+    private TaskContext(String processId, String gatewayId, String taskId) {
         this.processId = processId;
         this.gatewayId = gatewayId;
         this.taskId = taskId;
@@ -784,7 +784,12 @@ public class TaskContext {
             ctx.setGatewayResourceProfile(gatewayResourceProfile);
             ctx.setGatewayComputeResourcePreference(gatewayComputeResourcePreference);
             ctx.setGatewayStorageResourcePreference(gatewayStorageResourcePreference);
-
+            ctx.setApplicationDeploymentDescription(appCatalog.getApplicationDeployment()
+                    .getApplicationDeployement(processModel.getApplicationDeploymentId()));
+            ctx.setApplicationInterfaceDescription(appCatalog.getApplicationInterface()
+                    .getApplicationInterface(processModel.getApplicationInterfaceId()));
+            ctx.setComputeResourceDescription(appCatalog.getComputeResource().getComputeResource
+                    (ctx.getComputeResourceId()));
             return ctx;
         }
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
index 0b92922..16e8114 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapBuilder.java
@@ -2,17 +2,21 @@ package org.apache.airavata.helix.impl.task.submission;
 
 import groovy.text.GStringTemplateEngine;
 import groovy.text.TemplateEngine;
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.helix.impl.task.TaskContext;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.appdeployment.CommandObject;
 import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
+import org.apache.airavata.model.appcatalog.computeresource.*;
 import org.apache.airavata.model.application.io.DataType;
 import org.apache.airavata.model.application.io.InputDataObjectType;
 import org.apache.airavata.model.application.io.OutputDataObjectType;
 import org.apache.airavata.model.parallelism.ApplicationParallelismType;
+import org.apache.airavata.model.process.ProcessModel;
 import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel;
 import org.apache.airavata.model.task.JobSubmissionTaskModel;
+import org.apache.airavata.registry.cpi.AppCatalogException;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.thrift.TException;
@@ -38,6 +42,8 @@ public class GroovyMapBuilder {
 
     public GroovyMapData build() throws Exception {
         GroovyMapData mapData = new GroovyMapData();
+
+        setMailAddresses(taskContext, mapData);
         mapData.setInputDir(taskContext.getInputDir());
         mapData.setOutputDir(taskContext.getOutputDir());
         mapData.setExecutablePath(taskContext.getApplicationDeploymentDescription().getExecutablePath());
@@ -51,6 +57,7 @@ public class GroovyMapBuilder {
         mapData.setAccountString(taskContext.getAllocationProjectNumber());
         mapData.setReservation(taskContext.getReservation());
         mapData.setJobName("A" + String.valueOf(generateJobName()));
+        mapData.setWorkingDirectory(taskContext.getWorkingDir());
 
         List<String> inputValues = getProcessInputValues(taskContext.getProcessModel().getProcessInputs(), true);
         inputValues.addAll(getProcessOutputValues(taskContext.getProcessModel().getProcessOutputs(), true));
@@ -332,4 +339,69 @@ public class GroovyMapBuilder {
         }
     }
 
+    private static void setMailAddresses(TaskContext taskContext, GroovyMapData groovyMap) throws AppCatalogException,
+            ApplicationSettingsException {
+
+        ProcessModel processModel =  taskContext.getProcessModel();
+        String emailIds = null;
+        if (isEmailBasedJobMonitor(taskContext)) {
+            emailIds = ServerSettings.getEmailBasedMonitorAddress();
+        }
+        if (ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_ENABLE).equalsIgnoreCase("true")) {
+            String userJobNotifEmailIds = ServerSettings.getSetting(ServerSettings.JOB_NOTIFICATION_EMAILIDS);
+            if (userJobNotifEmailIds != null && !userJobNotifEmailIds.isEmpty()) {
+                if (emailIds != null && !emailIds.isEmpty()) {
+                    emailIds += ("," + userJobNotifEmailIds);
+                } else {
+                    emailIds = userJobNotifEmailIds;
+                }
+            }
+            if (processModel.isEnableEmailNotification()) {
+                List<String> emailList = processModel.getEmailAddresses();
+                String elist = listToCsv(emailList, ',');
+                if (elist != null && !elist.isEmpty()) {
+                    if (emailIds != null && !emailIds.isEmpty()) {
+                        emailIds = emailIds + "," + elist;
+                    } else {
+                        emailIds = elist;
+                    }
+                }
+            }
+        }
+        if (emailIds != null && !emailIds.isEmpty()) {
+            logger.info("Email list: " + emailIds);
+            groovyMap.setMailAddress(emailIds);
+        }
+    }
+
+    public static boolean isEmailBasedJobMonitor(TaskContext taskContext) throws AppCatalogException {
+        JobSubmissionProtocol jobSubmissionProtocol = taskContext.getPreferredJobSubmissionProtocol();
+        JobSubmissionInterface jobSubmissionInterface = taskContext.getPreferredJobSubmissionInterface();
+        if (jobSubmissionProtocol == JobSubmissionProtocol.SSH) {
+            String jobSubmissionInterfaceId = jobSubmissionInterface.getJobSubmissionInterfaceId();
+            SSHJobSubmission sshJobSubmission = taskContext.getAppCatalog().getComputeResource().getSSHJobSubmission(jobSubmissionInterfaceId);
+            MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
+            return monitorMode != null && monitorMode == MonitorMode.JOB_EMAIL_NOTIFICATION_MONITOR;
+        } else {
+            return false;
+        }
+    }
+
+    public static String listToCsv(List<String> listOfStrings, char separator) {
+        StringBuilder sb = new StringBuilder();
+
+        // all but last
+        for (int i = 0; i < listOfStrings.size() - 1; i++) {
+            sb.append(listOfStrings.get(i));
+            sb.append(separator);
+        }
+
+        // last string, no separator
+        if (listOfStrings.size() > 0) {
+            sb.append(listOfStrings.get(listOfStrings.size() - 1));
+        }
+
+        return sb.toString();
+    }
+
 }
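
The setMailAddresses logic added above boils down to concatenating up to three comma-separated sources: the email-based monitor address, the gateway-wide notification addresses, and the per-process user addresses. A compact illustration of that merging, skipping the null/empty guards the real code performs (all addresses here are made-up placeholders):

    import java.util.Arrays;
    import java.util.List;

    public class MailMergeSketch {

        // Equivalent of listToCsv(list, ',') in the diff; String.join does the same job.
        static String listToCsv(List<String> values) {
            return String.join(",", values);
        }

        public static void main(String[] args) {
            String monitorAddress = "jobmonitor@example.org";       // email-based monitor address (placeholder)
            String gatewayNotifyIds = "admin@example.org";           // job notification email ids setting (placeholder)
            List<String> userAddresses = Arrays.asList("alice@example.org", "bob@example.org");

            String emailIds = monitorAddress + "," + gatewayNotifyIds + "," + listToCsv(userAddresses);
            System.out.println(emailIds);
        }
    }
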
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
index 995f772..6ebde21 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/GroovyMapData.java
@@ -1,10 +1,11 @@
 package org.apache.airavata.helix.impl.task.submission;
 
-import com.google.common.collect.ImmutableMap;
 import groovy.lang.Writable;
 import groovy.text.GStringTemplateEngine;
 import groovy.text.TemplateEngine;
 import org.apache.airavata.common.utils.ApplicationSettings;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
 
 import java.io.File;
 import java.lang.reflect.Field;
@@ -15,6 +16,8 @@ import java.util.Map;
 
 public class GroovyMapData {
 
+    private static final Logger logger = LogManager.getLogger(GroovyMapData.class);
+
     @ScriptTag(name = "inputDir")
     private String inputDir;
 
@@ -453,6 +456,11 @@ public class GroovyMapData {
         } catch (Exception e) {
             throw new Exception("Error while generating script using groovy map");
         }
+
+        if (logger.isTraceEnabled()) {
+            logger.trace("Groovy map as string for template " + templateName);
+            logger.trace(make.toString());
+        }
         return make.toString();
     }
 }
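
The trace logging added above prints the job script rendered from a Groovy template with the @ScriptTag values as the binding. For background, a minimal example of the GStringTemplateEngine flow this relies on (requires Groovy on the classpath; the template text and binding keys here are invented, not the real PBS/SLURM templates):

    import groovy.lang.Writable;
    import groovy.text.GStringTemplateEngine;

    import java.util.HashMap;
    import java.util.Map;

    public class TemplateSketch {

        public static void main(String[] args) throws Exception {
            // A toy job-script template; the real templates ship as resource files per job manager.
            String template = "#!/bin/bash\n"
                    + "#SBATCH -J ${jobName}\n"
                    + "cd ${workingDirectory}\n"
                    + "${executablePath}\n";

            Map<String, Object> binding = new HashMap<>();
            binding.put("jobName", "A1234567");
            binding.put("workingDirectory", "/scratch/job-1");
            binding.put("executablePath", "/usr/bin/hostname");

            Writable made = new GStringTemplateEngine().createTemplate(template).make(binding);
            System.out.println(made.toString());
        }
    }
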
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
index fab4747..c85e18b 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
@@ -50,9 +50,11 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
             if (mapData != null) {
                 //jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
                 AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                        getTaskContext().getGatewayId(),
                         getTaskContext().getComputeResourceId(),
                         getTaskContext().getJobSubmissionProtocol().name(),
-                        getTaskContext().getComputeResourceCredentialToken());
+                        getTaskContext().getComputeResourceCredentialToken(),
+                        getTaskContext().getComputeResourceLoginUserName());
 
                 JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, mapData.getWorkingDirectory());
 
@@ -69,6 +71,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                         statusList.add(new JobStatus(JobState.FAILED));
                         statusList.get(0).setReason(submissionOutput.getFailureReason());
                         jobModel.setJobStatuses(statusList);
+                        jobModel.setJobDescription("Sample description");
                         saveJobModel(jobModel);
                         logger.error("expId: " + getExperimentId() + ", processid: " + getProcessId()+ ", taskId: " +
                                 getTaskId() + " :- Job submission failed for job name " + jobModel.getJobName());
@@ -83,6 +86,8 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                         //taskStatus.setReason("Job submission command didn't return a jobId");
                         //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                         //taskContext.setTaskStatus(taskStatus);
+                        logger.error("Standard error message : " + submissionOutput.getStdErr());
+                        logger.error("Standard out message : " + submissionOutput.getStdOut());
                         return onFail("Job submission command didn't return a jobId", false, null);
 
                     } else {
@@ -116,6 +121,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
 
                     //TODO save task status??
                 } else if (jobId != null && !jobId.isEmpty()) {
+                    logger.info("Received job id " + jobId + " from compute resource");
                     jobModel.setJobId(jobId);
                     saveJobModel(jobModel);
                     JobStatus jobStatus = new JobStatus();
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
index 58b70ef..2e4a052 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/ForkJobSubmissionTask.java
@@ -36,9 +36,11 @@ public class ForkJobSubmissionTask extends JobSubmissionTask {
             if (mapData != null) {
                 //jobModel.setJobDescription(FileUtils.readFileToString(jobFile));
                 AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                        getTaskContext().getGatewayId(),
                         getTaskContext().getComputeResourceId(),
                         getTaskContext().getJobSubmissionProtocol().name(),
-                        getTaskContext().getComputeResourceCredentialToken());
+                        getTaskContext().getComputeResourceCredentialToken(),
+                        getTaskContext().getComputeResourceLoginUserName());
 
                 JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, mapData.getWorkingDirectory());
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
index 11e59eb..1a024a7 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/JobSubmissionTask.java
@@ -28,6 +28,8 @@ import org.apache.airavata.model.status.JobStatus;
 import org.apache.airavata.registry.cpi.*;
 import org.apache.commons.io.FileUtils;
 import org.apache.helix.HelixManager;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
 
 import java.io.File;
 import java.security.SecureRandom;
@@ -35,7 +37,7 @@ import java.util.*;
 
 public abstract class JobSubmissionTask extends AiravataTask {
 
-
+    private static final Logger logger = LogManager.getLogger(JobSubmissionTask.class);
 
     @Override
     public void init(HelixManager manager, String workflowName, String jobName, String taskName) {
@@ -52,10 +54,19 @@ public abstract class JobSubmissionTask extends AiravataTask {
         int number = new SecureRandom().nextInt();
         number = (number < 0 ? -number : number);
         File tempJobFile = new File(getLocalDataDir(), "job_" + Integer.toString(number) + jobManagerConfiguration.getScriptExtension());
+
         FileUtils.writeStringToFile(tempJobFile, scriptAsString);
+        logger.info("Job submission file for process " + getProcessId() + " was created at : " + tempJobFile.getAbsolutePath());
 
+        logger.info("Copying file form " + tempJobFile.getAbsolutePath() + " to remote path " + workingDirectory +
+                " of compute resource " + getTaskContext().getComputeResourceId());
+        agentAdaptor.copyFile(tempJobFile.getAbsolutePath(), workingDirectory);
         // TODO transfer file
         RawCommandInfo submitCommand = jobManagerConfiguration.getSubmitCommand(workingDirectory, tempJobFile.getPath());
+
+        logger.debug("Submit command for process id " + getProcessId() + " : " + submitCommand.getRawCommand());
+        logger.debug("Working directory for process id " + getProcessId() + " : " + workingDirectory);
+
         CommandOutput commandOutput = agentAdaptor.executeCommand(submitCommand.getRawCommand(), workingDirectory);
 
         JobSubmissionOutput jsoutput = new JobSubmissionOutput();
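
The logging added above traces submitBatchJob's three steps: render the script to a uniquely named local file, copy it to the remote working directory through the adaptor, then run the resource manager's submit command there. A condensed sketch of that sequence with a hypothetical stand-in for the adaptor interface (the sbatch command and paths are examples only):

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.security.SecureRandom;

    public class SubmitFlowSketch {

        // Hypothetical stand-in for the agent-api adaptor operations used by submitBatchJob.
        interface Adaptor {
            void copyFile(String localFile, String remoteDir) throws IOException;
            String executeCommand(String command, String workingDir) throws IOException;
        }

        static String submitBatchJob(Adaptor adaptor, String scriptAsString, String workingDirectory,
                                     String scriptExtension, String submitCommand) throws IOException {
            // 1. Write the rendered job script to a uniquely named local file.
            int number = new SecureRandom().nextInt(Integer.MAX_VALUE);
            File tempJobFile = new File(System.getProperty("java.io.tmpdir"), "job_" + number + scriptExtension);
            Files.write(tempJobFile.toPath(), scriptAsString.getBytes(StandardCharsets.UTF_8));

            // 2. Copy it to the remote working directory.
            adaptor.copyFile(tempJobFile.getAbsolutePath(), workingDirectory);

            // 3. Run the submit command against the copied script and return its output.
            String command = submitCommand + " " + workingDirectory + "/" + tempJobFile.getName();
            return adaptor.executeCommand(command, workingDirectory);
        }

        public static void main(String[] args) throws IOException {
            Adaptor dummy = new Adaptor() {
                public void copyFile(String localFile, String remoteDir) { System.out.println("copy " + localFile + " -> " + remoteDir); }
                public String executeCommand(String command, String workingDir) { System.out.println("exec " + command); return "Submitted batch job 4242"; }
            };
            System.out.println(submitBatchJob(dummy, "#!/bin/bash\nhostname\n", "/scratch/job-1", ".slurm", "sbatch"));
        }
    }
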
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
index 67ad0db..e3ae4fa 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/LocalJobSubmissionTask.java
@@ -43,10 +43,12 @@ public class LocalJobSubmissionTask extends JobSubmissionTask {
                 saveJobModel(jobModel);
 
                 AgentAdaptor adaptor = taskHelper.getAdaptorSupport().fetchAdaptor(
+                        getTaskContext().getGatewayId(),
                         getTaskContext().getComputeResourceId(),
                         getTaskContext().getJobSubmissionProtocol().name(),
-                        getTaskContext().getComputeResourceCredentialToken());
-
+                        getTaskContext().getComputeResourceCredentialToken(),
+                        getTaskContext().getComputeResourceLoginUserName());
+                
                 GroovyMapData mapData = new GroovyMapBuilder(getTaskContext()).build();
                 JobSubmissionOutput submissionOutput = submitBatchJob(adaptor, mapData, groovyMapData.getWorkingDirectory());
 
diff --git a/modules/helix-spectator/src/main/resources/airavata-server.properties b/modules/helix-spectator/src/main/resources/airavata-server.properties
index 5f47d79..b54b28c 100644
--- a/modules/helix-spectator/src/main/resources/airavata-server.properties
+++ b/modules/helix-spectator/src/main/resources/airavata-server.properties
@@ -202,9 +202,9 @@ job.notification.flags=abe
 ###########################################################################
 # Credential Store module Configuration
 ###########################################################################
-credential.store.keystore.url=/home/pga/master-deployment/keystores/cred_store.jks
+credential.store.keystore.url=/Users/dimuthu/code/fork/airavata/modules/helix-spectator/src/main/resources/cred_store.jks
 credential.store.keystore.alias=seckey
-credential.store.keystore.password=123456
+credential.store.keystore.password=credstore123
 credential.store.jdbc.url=jdbc:mariadb://149.165.168.248:3306/credential_store
 credential.store.jdbc.user=eroma
 credential.store.jdbc.password=eroma123456
diff --git a/modules/helix-spectator/src/main/resources/log4j.properties b/modules/helix-spectator/src/main/resources/log4j.properties
index e910f32..69a4301 100644
--- a/modules/helix-spectator/src/main/resources/log4j.properties
+++ b/modules/helix-spectator/src/main/resources/log4j.properties
@@ -3,6 +3,8 @@ log4j.rootLogger=INFO, A1
 
 log4j.category.org.apache.helix=WARN
 log4j.category.org.apache.zookeeper=WARN
+log4j.category.org.apache.airavata.helix.impl.task.submission.GroovyMapData=TRACE
+log4j.category.org.apache.airavata.helix.impl.task.submission.task.JobSubmissionTask=DEBUG
 # A1 is set to be a ConsoleAppender.
 log4j.appender.A1=org.apache.log4j.ConsoleAppender
 


[airavata] 09/17: Configuring pre workflow manager to read from rabbitmq launch queue

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit ca455645a09da0c8184a741c7ee5cb3853338d70
Author: dimuthu <di...@gmail.com>
AuthorDate: Sun Mar 4 20:48:23 2018 -0500

    Configuring pre workflow manager to read from rabbitmq launch queue
---
 .../airavata/helix/workflow/WorkflowManager.java   | 22 ++++--
 modules/helix-spectator/pom.xml                    |  5 ++
 .../submission/task/DefaultJobSubmissionTask.java  |  2 +-
 .../helix/impl/workflow/PostWorkflowManager.java   | 18 ++---
 .../helix/impl/workflow/PreWorkflowManager.java    | 92 ++++++++++++++++++----
 5 files changed, 102 insertions(+), 37 deletions(-)

diff --git a/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java b/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
index 9ecafb9..e3d07b7 100644
--- a/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
+++ b/modules/airavata-helix/workflow-impl/src/main/java/org/apache/airavata/helix/workflow/WorkflowManager.java
@@ -2,13 +2,14 @@ package org.apache.airavata.helix.workflow;
 
 import org.apache.airavata.helix.core.AbstractTask;
 import org.apache.airavata.helix.core.OutPort;
-import org.apache.airavata.helix.core.util.*;
 import org.apache.airavata.helix.core.util.TaskUtil;
 import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.helix.HelixManager;
 import org.apache.helix.HelixManagerFactory;
 import org.apache.helix.InstanceType;
 import org.apache.helix.task.*;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -22,6 +23,8 @@ import java.util.Map;
  */
 public class WorkflowManager {
 
+    private static final Logger logger = LogManager.getLogger(WorkflowManager.class);
+
     private static final String WORKFLOW_PREFIX = "Workflow_of_process_";
     private TaskDriver taskDriver;
 
@@ -43,9 +46,12 @@ public class WorkflowManager {
         taskDriver = new TaskDriver(helixManager);
     }
 
-    public void launchWorkflow(String processId, List<AbstractTask> tasks, boolean globalParticipant) throws Exception {
+    public String launchWorkflow(String processId, List<AbstractTask> tasks, boolean globalParticipant, boolean monitor) throws Exception {
+
+        String workflowName = WORKFLOW_PREFIX + processId;
+        logger.info("Launching workflow " + workflowName + " for process " + processId);
 
-        Workflow.Builder workflowBuilder = new Workflow.Builder(WORKFLOW_PREFIX + processId).setExpiry(0);
+        Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName).setExpiry(0);
 
         for (int i = 0; i < tasks.size(); i++) {
             AbstractTask data = tasks.get(i);
@@ -86,9 +92,13 @@ public class WorkflowManager {
         //TODO : Do we need to monitor workflow status? If so how do we do it in a scalable manner? For example,
         // if the hfac that monitors a particular workflow, got killed due to some reason, who is taking the responsibility
 
-        TaskState taskState = taskDriver.pollForWorkflowState(workflow.getName(),
-                TaskState.COMPLETED, TaskState.FAILED, TaskState.STOPPED, TaskState.ABORTED);
-        System.out.println("Workflow finished with state " + taskState.name());
+        if (monitor) {
+            TaskState taskState = taskDriver.pollForWorkflowState(workflow.getName(),
+                    TaskState.COMPLETED, TaskState.FAILED, TaskState.STOPPED, TaskState.ABORTED);
+            logger.info("Workflow " + workflowName + " for process " + processId + " finished with state " + taskState.name());
+
+        }
+        return workflowName;
 
     }
 }
\ No newline at end of file
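
The new monitor flag above reuses TaskDriver.pollForWorkflowState to block until the workflow reaches a terminal state. A trimmed sketch of that monitoring path against the Helix task framework; the cluster name and ZooKeeper address are the sample values used throughout these diffs, and the instance and workflow names are placeholders:

    import org.apache.helix.HelixManager;
    import org.apache.helix.HelixManagerFactory;
    import org.apache.helix.InstanceType;
    import org.apache.helix.task.TaskDriver;
    import org.apache.helix.task.TaskState;

    public class WorkflowMonitorSketch {

        public static void main(String[] args) throws Exception {
            HelixManager manager = HelixManagerFactory.getZKHelixManager(
                    "AiravataDemoCluster", "wm-monitor", InstanceType.SPECTATOR, "localhost:2199");
            manager.connect();
            try {
                TaskDriver taskDriver = new TaskDriver(manager);
                // Blocks until the workflow reaches one of the listed terminal states.
                TaskState state = taskDriver.pollForWorkflowState("Workflow_of_process_PROCESS_xyz",
                        TaskState.COMPLETED, TaskState.FAILED, TaskState.STOPPED, TaskState.ABORTED);
                System.out.println("Workflow finished with state " + state.name());
            } finally {
                manager.disconnect();
            }
        }
    }
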
diff --git a/modules/helix-spectator/pom.xml b/modules/helix-spectator/pom.xml
index 213f747..326d7ef 100644
--- a/modules/helix-spectator/pom.xml
+++ b/modules/helix-spectator/pom.xml
@@ -60,5 +60,10 @@
             <artifactId>job-monitor</artifactId>
             <version>0.17-SNAPSHOT</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>airavata-messaging-core</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
     </dependencies>
 </project>
\ No newline at end of file
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
index a60a955..31b6f30 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/task/DefaultJobSubmissionTask.java
@@ -200,7 +200,7 @@ public class DefaultJobSubmissionTask extends JobSubmissionTask {
                     //taskStatus.setReason("Couldn't find job id in both submitted and verified steps");
                     //taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
                     return onFail("Couldn't find job id in both submitted and verified steps", false, null);
-                }else {
+                } else {
                     //GFacUtils.saveJobModel(processContext, jobModel);
                 }
 
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
index 25f8ec5..383fe37 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
@@ -103,6 +103,9 @@ public class PostWorkflowManager {
                 String processId = getProcessIdByJobId(jobStatusResult.getJobId());
                 String status = getStatusByJobId(jobStatusResult.getJobId());
 
+                logger.info("Starting the post workflow for job id : " + jobStatusResult.getJobId() + " with process id "
+                        + processId + ", gateway " + gateway + " and status " + status);
+
                 // TODO get cluster lock before that
                 if ("cancelled".equals(status)) {
 
@@ -151,8 +154,8 @@ public class PostWorkflowManager {
                         WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster",
                                 "wm-23", ServerSettings.getZookeeperConnection());
 
-                        workflowManager.launchWorkflow(UUID.randomUUID().toString(),
-                                allTasks.stream().map(t -> (AiravataTask) t).collect(Collectors.toList()), true);
+                        workflowManager.launchWorkflow(processId + "-POST-" + UUID.randomUUID().toString(),
+                                allTasks.stream().map(t -> (AiravataTask) t).collect(Collectors.toList()), true, false);
 
                     } else if (jobStatusResult.getState() == JobState.CANCELED) {
                         logger.info("Job " + jobStatusResult.getJobId() + " was externally cancelled");
@@ -176,25 +179,14 @@ public class PostWorkflowManager {
     private void runConsumer() throws InterruptedException {
         final Consumer<String, JobStatusResult> consumer = createConsumer();
 
-        final int giveUp = 100;   int noRecordsCount = 0;
-
         while (true) {
             final ConsumerRecords<String, JobStatusResult> consumerRecords = consumer.poll(1000);
-
-            /*if (consumerRecords.count() == 0) {
-                noRecordsCount++;
-                if (noRecordsCount > giveUp) break;
-                else continue;
-            }*/
-
             consumerRecords.forEach(record -> {
                 process(record.value());
             });
 
             consumer.commitAsync();
         }
-        //consumer.close();
-        //System.out.println("DONE");
     }
 
     public static void main(String[] args) throws Exception {
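
For context, the consumer loop above now polls indefinitely instead of giving up after a fixed number of empty polls. Below is a minimal, self-contained sketch of that same poll/commitAsync pattern against the kafka-clients API. The broker, topic and group id mirror the kafka.* defaults introduced in airavata-server.properties later in this patch series, and StringDeserializer stands in for whatever JobStatusResult deserializer PostWorkflowManager actually configures (not shown here).

    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    import java.util.Collections;
    import java.util.Properties;

    public class JobStatusConsumerSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            // Illustrative values; the real ones come from airavata-server.properties
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "MonitoringConsumer");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList("parsed-data"));
                while (true) {
                    // Block for up to one second waiting for job status records
                    ConsumerRecords<String, String> records = consumer.poll(1000);
                    records.forEach(record -> System.out.println("Received status event: " + record.value()));
                    // Commit offsets only after the batch has been handed off for processing
                    consumer.commitAsync();
                }
            }
        }
    }

Disabling auto-commit and committing only after process(record.value()) has run keeps a crashed worker from silently skipping job status events.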
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
index 9814b01..3030375 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
@@ -1,21 +1,29 @@
 package org.apache.airavata.helix.impl.workflow;
 
-import org.apache.airavata.helix.core.AbstractTask;
+import org.apache.airavata.common.exception.AiravataException;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.common.utils.ThriftUtils;
 import org.apache.airavata.helix.core.OutPort;
 import org.apache.airavata.helix.impl.task.AiravataTask;
 import org.apache.airavata.helix.impl.task.EnvSetupTask;
 import org.apache.airavata.helix.impl.task.InputDataStagingTask;
-import org.apache.airavata.helix.impl.task.OutputDataStagingTask;
 import org.apache.airavata.helix.impl.task.submission.task.DefaultJobSubmissionTask;
 import org.apache.airavata.helix.workflow.WorkflowManager;
+import org.apache.airavata.messaging.core.*;
 import org.apache.airavata.model.experiment.ExperimentModel;
+import org.apache.airavata.model.messaging.event.MessageType;
+import org.apache.airavata.model.messaging.event.ProcessSubmitEvent;
 import org.apache.airavata.model.process.ProcessModel;
 import org.apache.airavata.model.task.TaskModel;
 import org.apache.airavata.model.task.TaskTypes;
 import org.apache.airavata.registry.core.experiment.catalog.impl.RegistryFactory;
-import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.ExperimentCatalog;
 import org.apache.airavata.registry.cpi.ExperimentCatalogModelType;
+import org.apache.airavata.registry.cpi.RegistryException;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+import org.apache.thrift.TBase;
+import org.apache.thrift.TException;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -25,11 +33,28 @@ import java.util.stream.Collectors;
 
 public class PreWorkflowManager {
 
+    private static final Logger logger = LogManager.getLogger(PreWorkflowManager.class);
+
+    private final Subscriber subscriber;
+
+    public PreWorkflowManager() throws AiravataException {
+        List<String> routingKeys = new ArrayList<>();
+        routingKeys.add(ServerSettings.getRabbitmqProcessExchangeName());
+        this.subscriber = MessagingFactory.getSubscriber(new ProcessLaunchMessageHandler(), routingKeys, Type.PROCESS_LAUNCH);
+    }
+
     public static void main(String[] args) throws Exception {
 
-        String processId = "PROCESS_5b252ad9-d630-4cf9-80e3-0c30c55d1001";
-        AppCatalog appCatalog = RegistryFactory.getAppCatalog();
-        ExperimentCatalog experimentCatalog = RegistryFactory.getDefaultExpCatalog();
+        PreWorkflowManager preWorkflowManager = new PreWorkflowManager();
+
+        //String processId = "PROCESS_5b252ad9-d630-4cf9-80e3-0c30c55d1001";
+        //AppCatalog appCatalog = RegistryFactory.getAppCatalog();
+
+    }
+
+    private String createAndLaunchPreWorkflow(String processId, String gateway) throws Exception {
+
+        ExperimentCatalog experimentCatalog = RegistryFactory.getExperimentCatalog(gateway);
 
         ProcessModel processModel = (ProcessModel) experimentCatalog.get(ExperimentCatalogModelType.PROCESS, processId);
         ExperimentModel experimentModel = (ExperimentModel) experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT, processModel.getExperimentId());
@@ -74,16 +99,49 @@ public class PreWorkflowManager {
             }
         }
 
-/*        DefaultJobSubmissionTask defaultJobSubmissionTask = new DefaultJobSubmissionTask();
-        defaultJobSubmissionTask.setGatewayId("default");
-        defaultJobSubmissionTask.setExperimentId("Clone_of_Mothur-Test1_0c9f627e-2c32-403e-a28a-2a8b10c21c1a");
-        defaultJobSubmissionTask.setProcessId("PROCESS_438a87cc-2dec-4edc-bfeb-31128df91bb6");
-        defaultJobSubmissionTask.setTaskId("TASK_612844a4-aedb-41a5-824f-9b20c76867f7");
-
-        List<AbstractTask> tasks = new ArrayList<>();
-        tasks.add(defaultJobSubmissionTask);
-*/
-        WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster", "wm-22", "localhost:2199");
-        workflowManager.launchWorkflow(UUID.randomUUID().toString(), allTasks.stream().map(t -> (AiravataTask)t).collect(Collectors.toList()), true);
+        WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster", "wm-22",
+                ServerSettings.getZookeeperConnection());
+        String workflowName = workflowManager.launchWorkflow(processId + "-PRE-" + UUID.randomUUID().toString(),
+                allTasks.stream().map(t -> (AiravataTask) t).collect(Collectors.toList()), true, false);
+        return workflowName;
+    }
+
+    private class ProcessLaunchMessageHandler implements MessageHandler {
+
+        @Override
+        public void onMessage(MessageContext messageContext) {
+            logger.info(" Message Received with message id " + messageContext.getMessageId() + " and with message type: " + messageContext.getType());
+
+            if (messageContext.getType().equals(MessageType.LAUNCHPROCESS)) {
+                ProcessSubmitEvent event = new ProcessSubmitEvent();
+                TBase messageEvent = messageContext.getEvent();
+
+                try {
+                    byte[] bytes = ThriftUtils.serializeThriftObject(messageEvent);
+                    ThriftUtils.createThriftFromBytes(bytes, event);
+                } catch (TException e) {
+                    logger.error("Failed to fetch process submit event", e);
+                    subscriber.sendAck(messageContext.getDeliveryTag());
+                }
+
+                String processId = event.getProcessId();
+                String gateway = event.getGatewayId();
+
+                logger.info("Received process launch message for process " + processId + " in gateway " + gateway);
+
+                try {
+                    logger.info("Launching the pre workflow for process " + processId + " in gateway " + gateway );
+                    String workflowName = createAndLaunchPreWorkflow(processId, gateway);
+                    logger.info("Completed launching the pre workflow " + workflowName + " for process " + processId + " in gateway " + gateway );
+                    subscriber.sendAck(messageContext.getDeliveryTag());
+                } catch (Exception e) {
+                    logger.error("Failed to launch the pre workflow for process " + processId + " in gateway " + gateway, e);
+                    subscriber.sendAck(messageContext.getDeliveryTag());
+                }
+            } else {
+                logger.warn("Unknown message type");
+                subscriber.sendAck(messageContext.getDeliveryTag());
+            }
+        }
     }
 }
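
For reference, the handler above rebuilds a ProcessSubmitEvent from the raw TBase payload with ThriftUtils before launching the pre workflow. The sketch below shows that same serialize/rebuild round trip in isolation, and returns on TException rather than continuing with a half-populated event; the ids are placeholders, not real Airavata identifiers.

    import org.apache.airavata.common.utils.ThriftUtils;
    import org.apache.airavata.model.messaging.event.ProcessSubmitEvent;
    import org.apache.thrift.TException;

    public class ProcessSubmitEventRoundTrip {
        public static void main(String[] args) {
            try {
                // Build a sample event; the fields mirror what the handler reads back out
                ProcessSubmitEvent source = new ProcessSubmitEvent();
                source.setProcessId("PROCESS_example-0000");
                source.setGatewayId("example-gateway");

                // Same round trip the handler performs: serialize the TBase payload, then rebuild it
                byte[] bytes = ThriftUtils.serializeThriftObject(source);
                ProcessSubmitEvent copy = new ProcessSubmitEvent();
                ThriftUtils.createThriftFromBytes(bytes, copy);

                System.out.println("process=" + copy.getProcessId() + ", gateway=" + copy.getGatewayId());
            } catch (TException e) {
                // Bail out here; a workflow should never be launched from an event that failed to decode
                System.err.println("Could not decode ProcessSubmitEvent: " + e.getMessage());
            }
        }
    }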


[airavata] 14/17: Adding deployment module to helix workflows

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit e864db36b93cc0b55bb4255d699ca851f711005c
Author: dimuthu <di...@gmail.com>
AuthorDate: Wed Mar 7 13:42:32 2018 -0500

    Adding deployment module to helix workflows
---
 modules/airavata-helix-distribution/pom.xml        |  55 ++++++++++
 .../src/main/assembly/bin-assembly.xml             | 111 +++++++++++++++++++++
 .../resources/bin/airavata-controller-start.sh     |  99 ++++++++++++++++++
 .../main/resources/bin/airavata-controller-stop.sh |  71 +++++++++++++
 .../main/resources/bin/airavata-monitor-start.sh   |  99 ++++++++++++++++++
 .../resources/bin/airavata-participant-start.sh    |  99 ++++++++++++++++++
 .../resources/bin/airavata-participant-stop.sh     |  71 +++++++++++++
 .../main/resources/bin/airavata-post-wm-start.sh   |  99 ++++++++++++++++++
 .../main/resources/bin/airavata-post-wm-stop.sh    |  71 +++++++++++++
 .../main/resources/bin/airavata-pre-wm-start.sh    |  99 ++++++++++++++++++
 .../src/main/resources/bin/airavata-pre-wm-stop.sh |  71 +++++++++++++
 .../src/main/resources/bin/setenv.sh               |  46 +++++++++
 .../resources/conf}/airavata-server.properties     |  15 ++-
 .../src/main/resources/conf/application.properties |   3 +
 .../src/main/resources/conf/cred_store.jks         | Bin 0 -> 499 bytes
 .../src/main/resources/conf/log4j.properties       |  13 +++
 .../helix/core/participant/HelixParticipant.java   |   9 +-
 modules/helix-spectator/pom.xml                    |   2 +-
 .../helix/impl}/controller/HelixController.java    |  40 +++++++-
 .../helix/impl/participant/GlobalParticipant.java  |  33 +++++-
 .../helix/impl/task/TaskOnFailException.java       |   1 -
 .../impl/task/submission/config/GroovyMapData.java |   6 +-
 .../helix/impl/workflow/PostWorkflowManager.java   |  70 ++-----------
 .../helix/impl/workflow/PreWorkflowManager.java    |  17 ++--
 .../src/main/resources/airavata-server.properties  |  15 ++-
 pom.xml                                            |   4 +-
 26 files changed, 1128 insertions(+), 91 deletions(-)

diff --git a/modules/airavata-helix-distribution/pom.xml b/modules/airavata-helix-distribution/pom.xml
new file mode 100644
index 0000000..e1001ad
--- /dev/null
+++ b/modules/airavata-helix-distribution/pom.xml
@@ -0,0 +1,55 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>airavata</artifactId>
+        <groupId>org.apache.airavata</groupId>
+        <version>0.17-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>airavata-helix-distribution</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.airavata</groupId>
+            <artifactId>helix-spectator</artifactId>
+            <version>0.17-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.5.5</version>
+                <executions>
+                    <execution>
+                        <id>distribution-package</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <configuration>
+                            <tarLongFileMode>posix</tarLongFileMode>
+                            <finalName>${archieve.name}-${project.version}</finalName>
+                            <descriptors>
+                                <descriptor>src/main/assembly/bin-assembly.xml</descriptor>
+                                <!-- <descriptor>src/main/assembly/src-assembly.xml</descriptor> -->
+                            </descriptors>
+                            <attach>false</attach>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+    <properties>
+        <archieve.name>helix-spectator</archieve.name>
+    </properties>
+
+</project>
\ No newline at end of file
diff --git a/modules/airavata-helix-distribution/src/main/assembly/bin-assembly.xml b/modules/airavata-helix-distribution/src/main/assembly/bin-assembly.xml
new file mode 100644
index 0000000..d1b77c4
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/assembly/bin-assembly.xml
@@ -0,0 +1,111 @@
+<!--
+
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<!DOCTYPE assembly [
+        <!ELEMENT assembly (id|includeBaseDirectory|baseDirectory|formats|fileSets|dependencySets)*>
+        <!ELEMENT id (#PCDATA)>
+        <!ELEMENT includeBaseDirectory (#PCDATA)>
+        <!ELEMENT baseDirectory (#PCDATA)>
+        <!ELEMENT formats (format)*>
+        <!ELEMENT format (#PCDATA)>
+        <!ELEMENT fileSets (fileSet)*>
+        <!ELEMENT fileSet (directory|outputDirectory|fileMode|includes)*>
+        <!ELEMENT directory (#PCDATA)>
+        <!ELEMENT outputDirectory (#PCDATA)>
+        <!ELEMENT includes (include)*>
+        <!ELEMENT include (#PCDATA)>
+        <!ELEMENT dependencySets (dependencySet)*>
+        <!ELEMENT dependencySet (outputDirectory|outputFileNameMapping|includes)*>
+        ]>
+<assembly>
+    <id>bin</id>
+    <includeBaseDirectory>true</includeBaseDirectory>
+    <baseDirectory>${archieve.name}-${version}</baseDirectory>
+    <formats>
+        <format>tar.gz</format>
+        <format>zip</format>
+    </formats>
+
+    <fileSets>
+
+
+        <!-- ********************** copy database scripts ********************** -->
+        <fileSet>
+            <directory>src/main/resources/bin</directory>
+            <outputDirectory>bin</outputDirectory>
+            <fileMode>777</fileMode>
+            <includes>
+                <include>*.sh</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>src/main/resources/conf</directory>
+            <outputDirectory>bin</outputDirectory>
+            <fileMode>777</fileMode>
+            <includes>
+                <include>airavata-server.properties</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>src/main/resources/conf</directory>
+            <outputDirectory>conf</outputDirectory>
+            <includes>
+                <include>airavata-server.properties</include>
+                <include>application.properties</include>
+                <include>log4j.properties</include>
+                <include>cred_store.jks</include>
+                <include>PBS_Groovy.template</include>
+                <include>SLURM_Groovy.template</include>
+                <include>LSF_Groovy.template</include>
+                <include>UGE_Groovy.template</include>
+                <include>FORK_Groovy.template</include>
+            </includes>
+        </fileSet>
+
+        <!-- Create logs directory -->
+        <fileSet>
+            <directory>./</directory>
+            <outputDirectory>logs</outputDirectory>
+            <excludes>
+                <exclude>*/**</exclude>
+            </excludes>
+        </fileSet>
+
+
+    </fileSets>
+
+    <dependencySets>
+
+        <dependencySet>
+            <useProjectArtifact>false</useProjectArtifact>
+            <outputDirectory>lib</outputDirectory>
+            <includes>
+                <include>*:*:jar</include>
+
+            </includes>
+            <excludes>
+                <exclude>mysql:mysql-connector-java</exclude>
+                <exclude>ch.qos.logback:logback-classic:jar</exclude>
+            </excludes>
+        </dependencySet>
+    </dependencySets>
+
+</assembly>
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-controller-start.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-controller-start.sh
new file mode 100644
index 0000000..d3556fb
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-controller-start.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+# Capture user's working dir before changing directory
+CWD="$PWD"
+cd ${AIRAVATA_HOME}/bin
+LOGO_FILE="logo.txt"
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+EXTRA_ARGS=""
+SERVERS=""
+IS_DAEMON_MODE=false
+LOGO=true
+IS_SUBSET=false
+SUBSET=""
+DEFAULT_LOG_FILE="${AIRAVATA_HOME}/logs/controller-output.log"
+LOG_FILE=$DEFAULT_LOG_FILE
+
+# parse command arguments
+for var in "$@"
+do
+    case ${var} in
+        -xdebug)
+        	AIRAVATA_COMMAND="${AIRAVATA_COMMAND}"
+            JAVA_OPTS="$JAVA_OPTS -Xdebug -Xnoagent -Xrunjdwp:transport=dt_socket,server=y,address=8000"
+            shift
+        ;;
+        -security)
+            JAVA_OPTS="${JAVA_OPTS} -Djava.security.manager -Djava.security.policy=${AIRAVATA_HOME}/conf/axis2.policy -Daxis2.home=${AIRAVATA_HOME}"
+            shift
+        ;;
+        -d)
+	        IS_DAEMON_MODE=true
+	        shift
+	        ;;
+	    -nologo)
+	        LOGO=false
+            shift
+        ;;
+        -log)
+            shift
+            LOG_FILE="$1"
+            shift
+            # If relative path, expand to absolute path using the user's $CWD
+            if [ -z "`echo "$LOG_FILE" | egrep "^/"`" ]; then
+                LOG_FILE="${CWD}/${LOG_FILE}"
+            fi
+        ;;
+        -h)
+            echo "Usage: airavata-controller-start.sh [command-options]"
+
+            echo "command options:"
+	        echo "  -d                  Start server in daemon mode"
+            echo "  -xdebug             Start Airavata Server under JPDA debugger"
+            echo "  -nologo             Do not show airavata logo"
+            echo "  -security           Enable Java 2 security"
+	        echo "  --<key>[=<value>]   Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+	        echo "  -log <LOG_FILE>     Where to redirect stdout/stderr (defaults to $DEFAULT_LOG_FILE)"
+            echo "  -h                  Display this help and exit"
+            shift
+            exit 0
+        ;;
+	    *)
+	        EXTRA_ARGS="${EXTRA_ARGS} ${var}"
+            shift
+        ;;
+    esac
+done
+
+AIRAVATA_COMMAND="--confDir=${AIRAVATA_HOME}/conf"
+
+if ${IS_DAEMON_MODE} ; then
+	echo "Starting airavata server/s in daemon mode..."
+	echo "Redirecting output to $LOG_FILE"
+	nohup java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.helix.impl.controller.HelixController ${AIRAVATA_COMMAND} $* > $LOG_FILE 2>&1 &
+else
+	java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.helix.impl.controller.HelixController ${AIRAVATA_COMMAND} $*
+fi
+
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-controller-stop.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-controller-stop.sh
new file mode 100644
index 0000000..b76aa4d
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-controller-stop.sh
@@ -0,0 +1,71 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+cd ${AIRAVATA_HOME}/bin
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+FORCE=false
+
+for var in "$@"
+do
+    case ${var} in
+    	-f | --force)
+	        FORCE=true
+            shift
+        ;;
+        -h)
+            echo "Usage: airavata-controller-stop.sh [command-options]"
+            echo "command options:"
+	        echo "  -f , --force       Force stop all airavata servers."
+	        echo "  --<key>[=<value>]  Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+            echo "  -h                 Display this help and exit"
+            shift
+            exit 0
+        ;;
+	*)
+            shift
+    esac
+done
+
+if ${FORCE} ; then
+	for f in `find . -name "controller_start_*"`; do
+	    # split file name using "_" underscore
+		f_split=(${f//_/ });
+		echo "Found process file : $f"
+		echo -n "    Sending kill signals to process ${f_split[2]}..."
+		out=`kill -9 ${f_split[2]} 2>&1`
+		if [ -z "$out" ]; then
+		    echo "done"
+		else
+		    echo "failed (REASON: $out)"
+		fi
+		echo -n "    Removing process file..."
+		out=`rm ${f} 2>&1`
+		if [ -z "$out" ]; then
+		    echo "done"
+		else
+		    echo "failed (REASON: $out)"
+		fi
+	done
+else
+    java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.server.ServerMain stop ${AIRAVATA_COMMAND} $*
+fi
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-monitor-start.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-monitor-start.sh
new file mode 100644
index 0000000..5e848bb
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-monitor-start.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+# Capture user's working dir before changing directory
+CWD="$PWD"
+cd ${AIRAVATA_HOME}/bin
+LOGO_FILE="logo.txt"
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+EXTRA_ARGS=""
+SERVERS=""
+IS_DAEMON_MODE=false
+LOGO=true
+IS_SUBSET=false
+SUBSET=""
+DEFAULT_LOG_FILE="${AIRAVATA_HOME}/logs/monitor-output.log"
+LOG_FILE=$DEFAULT_LOG_FILE
+
+# parse command arguments
+for var in "$@"
+do
+    case ${var} in
+        -xdebug)
+        	AIRAVATA_COMMAND="${AIRAVATA_COMMAND}"
+            JAVA_OPTS="$JAVA_OPTS -Xdebug -Xnoagent -Xrunjdwp:transport=dt_socket,server=y,address=8000"
+            shift
+        ;;
+        -security)
+            JAVA_OPTS="${JAVA_OPTS} -Djava.security.manager -Djava.security.policy=${AIRAVATA_HOME}/conf/axis2.policy -Daxis2.home=${AIRAVATA_HOME}"
+            shift
+        ;;
+        -d)
+	        IS_DAEMON_MODE=true
+	        shift
+	        ;;
+	    -nologo)
+	        LOGO=false
+            shift
+        ;;
+        -log)
+            shift
+            LOG_FILE="$1"
+            shift
+            # If relative path, expand to absolute path using the user's $CWD
+            if [ -z "`echo "$LOG_FILE" | egrep "^/"`" ]; then
+                LOG_FILE="${CWD}/${LOG_FILE}"
+            fi
+        ;;
+        -h)
+            echo "Usage: airavata-monitor-start.sh [command-options]"
+
+            echo "command options:"
+	        echo "  -d                  Start server in daemon mode"
+            echo "  -xdebug             Start Airavata Server under JPDA debugger"
+            echo "  -nologo             Do not show airavata logo"
+            echo "  -security           Enable Java 2 security"
+	        echo "  --<key>[=<value>]   Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+	        echo "  -log <LOG_FILE>     Where to redirect stdout/stderr (defaults to $DEFAULT_LOG_FILE)"
+            echo "  -h                  Display this help and exit"
+            shift
+            exit 0
+        ;;
+	    *)
+	        EXTRA_ARGS="${EXTRA_ARGS} ${var}"
+            shift
+        ;;
+    esac
+done
+
+AIRAVATA_COMMAND="--confDir=${AIRAVATA_HOME}/conf"
+
+if ${IS_DAEMON_MODE} ; then
+	echo "Starting airavata server/s in daemon mode..."
+	echo "Redirecting output to $LOG_FILE"
+	nohup java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.job.monitor.EmailBasedMonitor ${AIRAVATA_COMMAND} $* > $LOG_FILE 2>&1 &
+else
+	java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.job.monitor.EmailBasedMonitor ${AIRAVATA_COMMAND} $*
+fi
+
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-participant-start.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-participant-start.sh
new file mode 100644
index 0000000..0d39101
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-participant-start.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+# Capture user's working dir before changing directory
+CWD="$PWD"
+cd ${AIRAVATA_HOME}/bin
+LOGO_FILE="logo.txt"
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+EXTRA_ARGS=""
+SERVERS=""
+IS_DAEMON_MODE=false
+LOGO=true
+IS_SUBSET=false
+SUBSET=""
+DEFAULT_LOG_FILE="${AIRAVATA_HOME}/logs/participant-output.log"
+LOG_FILE=$DEFAULT_LOG_FILE
+
+# parse command arguments
+for var in "$@"
+do
+    case ${var} in
+        -xdebug)
+        	AIRAVATA_COMMAND="${AIRAVATA_COMMAND}"
+            JAVA_OPTS="$JAVA_OPTS -Xdebug -Xnoagent -Xrunjdwp:transport=dt_socket,server=y,address=8000"
+            shift
+        ;;
+        -security)
+            JAVA_OPTS="${JAVA_OPTS} -Djava.security.manager -Djava.security.policy=${AIRAVATA_HOME}/conf/axis2.policy -Daxis2.home=${AIRAVATA_HOME}"
+            shift
+        ;;
+        -d)
+	        IS_DAEMON_MODE=true
+	        shift
+	        ;;
+	    -nologo)
+	        LOGO=false
+            shift
+        ;;
+        -log)
+            shift
+            LOG_FILE="$1"
+            shift
+            # If relative path, expand to absolute path using the user's $CWD
+            if [ -z "`echo "$LOG_FILE" | egrep "^/"`" ]; then
+                LOG_FILE="${CWD}/${LOG_FILE}"
+            fi
+        ;;
+        -h)
+            echo "Usage: airavata-participant-start.sh [server-name/s] [command-options]"
+
+            echo "command options:"
+	        echo "  -d                  Start server in daemon mode"
+            echo "  -xdebug             Start Airavata Server under JPDA debugger"
+            echo "  -nologo             Do not show airavata logo"
+            echo "  -security           Enable Java 2 security"
+	        echo "  --<key>[=<value>]   Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+	        echo "  -log <LOG_FILE>     Where to redirect stdout/stderr (defaults to $DEFAULT_LOG_FILE)"
+            echo "  -h                  Display this help and exit"
+            shift
+            exit 0
+        ;;
+	    *)
+	        EXTRA_ARGS="${EXTRA_ARGS} ${var}"
+            shift
+        ;;
+    esac
+done
+
+AIRAVATA_COMMAND="--confDir=${AIRAVATA_HOME}/conf"
+
+if ${IS_DAEMON_MODE} ; then
+	echo "Starting airavata server/s in daemon mode..."
+	echo "Redirecting output to $LOG_FILE"
+	nohup java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.helix.impl.participant.GlobalParticipant ${AIRAVATA_COMMAND} $* > $LOG_FILE 2>&1 &
+else
+	java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.helix.impl.participant.GlobalParticipant ${AIRAVATA_COMMAND} $*
+fi
+
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-participant-stop.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-participant-stop.sh
new file mode 100644
index 0000000..be1200c
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-participant-stop.sh
@@ -0,0 +1,71 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+cd ${AIRAVATA_HOME}/bin
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+FORCE=false
+
+for var in "$@"
+do
+    case ${var} in
+    	-f | --force)
+	        FORCE=true
+            shift
+        ;;
+        -h)
+            echo "Usage: airavata-participant-stop.sh [command-options]"
+            echo "command options:"
+	        echo "  -f , --force       Force stop all airavata servers."
+	        echo "  --<key>[=<value>]  Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+            echo "  -h                 Display this help and exit"
+            shift
+            exit 0
+        ;;
+	*)
+            shift
+    esac
+done
+
+if ${FORCE} ; then
+	for f in `find . -name "participant_start_*"`; do
+	    # split file name using "_" underscore
+		f_split=(${f//_/ });
+		echo "Found process file : $f"
+		echo -n "    Sending kill signals to process ${f_split[2]}..."
+		out=`kill -9 ${f_split[2]} 2>&1`
+		if [ -z "$out" ]; then
+		    echo "done"
+		else
+		    echo "failed (REASON: $out)"
+		fi
+		echo -n "    Removing process file..."
+		out=`rm ${f} 2>&1`
+		if [ -z "$out" ]; then
+		    echo "done"
+		else
+		    echo "failed (REASON: $out)"
+		fi
+	done
+else
+    java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.server.ServerMain stop ${AIRAVATA_COMMAND} $*
+fi
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-post-wm-start.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-post-wm-start.sh
new file mode 100644
index 0000000..2b9ae83
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-post-wm-start.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+# Capture user's working dir before changing directory
+CWD="$PWD"
+cd ${AIRAVATA_HOME}/bin
+LOGO_FILE="logo.txt"
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+EXTRA_ARGS=""
+SERVERS=""
+IS_DAEMON_MODE=false
+LOGO=true
+IS_SUBSET=false
+SUBSET=""
+DEFAULT_LOG_FILE="${AIRAVATA_HOME}/logs/post-wm-output.log"
+LOG_FILE=$DEFAULT_LOG_FILE
+
+# parse command arguments
+for var in "$@"
+do
+    case ${var} in
+        -xdebug)
+        	AIRAVATA_COMMAND="${AIRAVATA_COMMAND}"
+            JAVA_OPTS="$JAVA_OPTS -Xdebug -Xnoagent -Xrunjdwp:transport=dt_socket,server=y,address=8000"
+            shift
+        ;;
+        -security)
+            JAVA_OPTS="${JAVA_OPTS} -Djava.security.manager -Djava.security.policy=${AIRAVATA_HOME}/conf/axis2.policy -Daxis2.home=${AIRAVATA_HOME}"
+            shift
+        ;;
+        -d)
+	        IS_DAEMON_MODE=true
+	        shift
+	        ;;
+	    -nologo)
+	        LOGO=false
+            shift
+        ;;
+        -log)
+            shift
+            LOG_FILE="$1"
+            shift
+            # If relative path, expand to absolute path using the user's $CWD
+            if [ -z "`echo "$LOG_FILE" | egrep "^/"`" ]; then
+                LOG_FILE="${CWD}/${LOG_FILE}"
+            fi
+        ;;
+        -h)
+            echo "Usage: airavata-post-wm-start.sh [command-options]"
+
+            echo "command options:"
+	        echo "  -d                  Start server in daemon mode"
+            echo "  -xdebug             Start Airavata Server under JPDA debugger"
+            echo "  -nologo             Do not show airavata logo"
+            echo "  -security           Enable Java 2 security"
+	        echo "  --<key>[=<value>]   Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+	        echo "  -log <LOG_FILE>     Where to redirect stdout/stderr (defaults to $DEFAULT_LOG_FILE)"
+            echo "  -h                  Display this help and exit"
+            shift
+            exit 0
+        ;;
+	    *)
+	        EXTRA_ARGS="${EXTRA_ARGS} ${var}"
+            shift
+        ;;
+    esac
+done
+
+AIRAVATA_COMMAND="--confDir=${AIRAVATA_HOME}/conf"
+
+if ${IS_DAEMON_MODE} ; then
+	echo "Starting airavata server/s in daemon mode..."
+	echo "Redirecting output to $LOG_FILE"
+	nohup java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.helix.impl.workflow.PostWorkflowManager ${AIRAVATA_COMMAND} $* > $LOG_FILE 2>&1 &
+else
+	java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.helix.impl.workflow.PostWorkflowManager ${AIRAVATA_COMMAND} $*
+fi
+
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-post-wm-stop.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-post-wm-stop.sh
new file mode 100644
index 0000000..4493d6e
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-post-wm-stop.sh
@@ -0,0 +1,71 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+cd ${AIRAVATA_HOME}/bin
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+FORCE=false
+
+for var in "$@"
+do
+    case ${var} in
+    	-f | --force)
+	        FORCE=true
+            shift
+        ;;
+        -h)
+            echo "Usage: airavata-post-wm-stop.sh [command-options]"
+            echo "command options:"
+	        echo "  -f , --force       Force stop all airavata servers."
+	        echo "  --<key>[=<value>]  Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+            echo "  -h                 Display this help and exit"
+            shift
+            exit 0
+        ;;
+	*)
+            shift
+    esac
+done
+
+if ${FORCE} ; then
+	for f in `find . -name "post_wm_start_*"`; do
+	    # split file name using "_" underscore
+		f_split=(${f//_/ });
+		echo "Found process file : $f"
+		echo -n "    Sending kill signals to process ${f_split[2]}..."
+		out=`kill -9 ${f_split[2]} 2>&1`
+		if [ -z "$out" ]; then
+		    echo "done"
+		else
+		    echo "failed (REASON: $out)"
+		fi
+		echo -n "    Removing process file..."
+		out=`rm ${f} 2>&1`
+		if [ -z "$out" ]; then
+		    echo "done"
+		else
+		    echo "failed (REASON: $out)"
+		fi
+	done
+else
+    java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.server.ServerMain stop ${AIRAVATA_COMMAND} $*
+fi
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-pre-wm-start.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-pre-wm-start.sh
new file mode 100644
index 0000000..8d0b838
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-pre-wm-start.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+# Capture user's working dir before changing directory
+CWD="$PWD"
+cd ${AIRAVATA_HOME}/bin
+LOGO_FILE="logo.txt"
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+EXTRA_ARGS=""
+SERVERS=""
+IS_DAEMON_MODE=false
+LOGO=true
+IS_SUBSET=false
+SUBSET=""
+DEFAULT_LOG_FILE="${AIRAVATA_HOME}/logs/pre-wm-output.log"
+LOG_FILE=$DEFAULT_LOG_FILE
+
+# parse command arguments
+for var in "$@"
+do
+    case ${var} in
+        -xdebug)
+        	AIRAVATA_COMMAND="${AIRAVATA_COMMAND}"
+            JAVA_OPTS="$JAVA_OPTS -Xdebug -Xnoagent -Xrunjdwp:transport=dt_socket,server=y,address=8000"
+            shift
+        ;;
+        -security)
+            JAVA_OPTS="${JAVA_OPTS} -Djava.security.manager -Djava.security.policy=${AIRAVATA_HOME}/conf/axis2.policy -Daxis2.home=${AIRAVATA_HOME}"
+            shift
+        ;;
+        -d)
+	        IS_DAEMON_MODE=true
+	        shift
+	        ;;
+	    -nologo)
+	        LOGO=false
+            shift
+        ;;
+        -log)
+            shift
+            LOG_FILE="$1"
+            shift
+            # If relative path, expand to absolute path using the user's $CWD
+            if [ -z "`echo "$LOG_FILE" | egrep "^/"`" ]; then
+                LOG_FILE="${CWD}/${LOG_FILE}"
+            fi
+        ;;
+        -h)
+            echo "Usage: airavata-pre-wm-start.sh [command-options]"
+
+            echo "command options:"
+	        echo "  -d                  Start server in daemon mode"
+            echo "  -xdebug             Start Airavata Server under JPDA debugger"
+            echo "  -nologo             Do not show airavata logo"
+            echo "  -security           Enable Java 2 security"
+	        echo "  --<key>[=<value>]   Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+	        echo "  -log <LOG_FILE>     Where to redirect stdout/stderr (defaults to $DEFAULT_LOG_FILE)"
+            echo "  -h                  Display this help and exit"
+            shift
+            exit 0
+        ;;
+	    *)
+	        EXTRA_ARGS="${EXTRA_ARGS} ${var}"
+            shift
+        ;;
+    esac
+done
+
+AIRAVATA_COMMAND="--confDir=${AIRAVATA_HOME}/conf"
+
+if ${IS_DAEMON_MODE} ; then
+	echo "Starting airavata server/s in daemon mode..."
+	echo "Redirecting output to $LOG_FILE"
+	nohup java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.helix.impl.workflow.PreWorkflowManager ${AIRAVATA_COMMAND} $* > $LOG_FILE 2>&1 &
+else
+	java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.helix.impl.workflow.PreWorkflowManager ${AIRAVATA_COMMAND} $*
+fi
+
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/airavata-pre-wm-stop.sh b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-pre-wm-stop.sh
new file mode 100644
index 0000000..4493d6e
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/airavata-pre-wm-stop.sh
@@ -0,0 +1,71 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+. `dirname $0`/setenv.sh
+cd ${AIRAVATA_HOME}/bin
+
+JAVA_OPTS=""
+AIRAVATA_COMMAND=""
+FORCE=false
+
+for var in "$@"
+do
+    case ${var} in
+    	-f | --force)
+	        FORCE=true
+            shift
+        ;;
+        -h)
+            echo "Usage: airavata-pre-wm-stop.sh [command-options]"
+            echo "command options:"
+	        echo "  -f , --force       Force stop all airavata servers."
+	        echo "  --<key>[=<value>]  Server setting(s) to override or introduce (overrides values in airavata-server.properties)"
+            echo "  -h                 Display this help and exit"
+            shift
+            exit 0
+        ;;
+	*)
+            shift
+    esac
+done
+
+if ${FORCE} ; then
+	for f in `find . -name "pre_wm_start_*"`; do
+	    # split file name using "_" underscore
+		f_split=(${f//_/ });
+		echo "Found process file : $f"
+		echo -n "    Sending kill signals to process ${f_split[2]}..."
+		out=`kill -9 ${f_split[2]} 2>&1`
+		if [ -z "$out" ]; then
+		    echo "done"
+		else
+		    echo "failed (REASON: $out)"
+		fi
+		echo -n "    Removing process file..."
+		out=`rm ${f} 2>&1`
+		if [ -z "$out" ]; then
+		    echo "done"
+		else
+		    echo "failed (REASON: $out)"
+		fi
+	done
+else
+    java ${JAVA_OPTS} -classpath "${AIRAVATA_CLASSPATH}" \
+    org.apache.airavata.server.ServerMain stop ${AIRAVATA_COMMAND} $*
+fi
diff --git a/modules/airavata-helix-distribution/src/main/resources/bin/setenv.sh b/modules/airavata-helix-distribution/src/main/resources/bin/setenv.sh
new file mode 100755
index 0000000..9e894e1
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/bin/setenv.sh
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# resolve links - $0 may be a softlink
+PRG="$0"
+
+while [ -h "$PRG" ]; do
+  ls=`ls -ld "$PRG"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '.*/.*' > /dev/null; then
+    PRG="$link"
+  else
+    PRG=`dirname "$PRG"`/"$link"
+  fi
+done
+
+PRGDIR=`dirname "$PRG"`
+
+# Only set AIRAVATA_HOME if not already set
+[ -z "$AIRAVATA_HOME" ] && AIRAVATA_HOME=`cd "$PRGDIR/.." ; pwd`
+
+AIRAVATA_CLASSPATH=""
+
+for f in "$AIRAVATA_HOME"/lib/*.jar
+do
+  AIRAVATA_CLASSPATH="$AIRAVATA_CLASSPATH":$f
+done
+
+export AIRAVATA_HOME
+export AIRAVATA_CLASSPATH
diff --git a/modules/helix-spectator/src/main/resources/airavata-server.properties b/modules/airavata-helix-distribution/src/main/resources/conf/airavata-server.properties
similarity index 96%
copy from modules/helix-spectator/src/main/resources/airavata-server.properties
copy to modules/airavata-helix-distribution/src/main/resources/conf/airavata-server.properties
index b54b28c..19b3b3d 100644
--- a/modules/helix-spectator/src/main/resources/airavata-server.properties
+++ b/modules/airavata-helix-distribution/src/main/resources/conf/airavata-server.properties
@@ -260,13 +260,24 @@ email.based.monitor.store.protocol=imaps
 email.based.monitoring.period=10000
 
 ###########################################################################
+#Helix workflow manager configurations
+###########################################################################
+
+kafka.broker.url=localhost:9092
+kafka.broker.topic=parsed-data
+kafka.broker.consumer.group=MonitoringConsumer
+helix.cluster.name=AiravataDemoCluster
+pre.workflow.manager.name=prewm
+post.workflow.manager.name=postwm
+
+###########################################################################
 # AMQP Notification Configuration
 ###########################################################################
 #for simple scenarios we can use the guest user
 #rabbitmq.broker.url=amqp://localhost:5672
 #for production scenarios, give url as amqp://userName:password@hostName:portNumber/virtualHost, create user, virtualhost
 # and give permissions, refer: http://blog.dtzq.com/2012/06/rabbitmq-users-and-virtual-hosts.html
-rabbitmq.broker.url=amqp://airavata:123456@192.168.99.102:5672/master
+rabbitmq.broker.url=amqp://rabbit:rabbit123456@149.165.168.248:5672/master
 rabbitmq.status.exchange.name=status_exchange
 rabbitmq.process.exchange.name=process_exchange
 rabbitmq.experiment.exchange.name=experiment_exchange
@@ -279,7 +290,7 @@ experiment.launch..queue.name=experiment.launch.queue
 # Zookeeper Server Configuration
 ###########################################################################
 embedded.zk=false
-zookeeper.server.connection=192.168.99.102:2181
+zookeeper.server.connection=localhost:2199
 zookeeper.timeout=30000
 
 ########################################################################
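
The kafka.* keys and the pre/post workflow manager names above are ordinary entries in airavata-server.properties. Inside Airavata they are normally resolved through ServerSettings, but the following minimal sketch simply loads the file with java.util.Properties to sanity-check a deployment's values; the conf/ path is a hypothetical working-directory-relative location.

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class HelixWorkflowConfigCheck {
        public static void main(String[] args) throws IOException {
            Properties props = new Properties();
            // Hypothetical path; the distribution ships the file under <AIRAVATA_HOME>/conf
            try (FileInputStream in = new FileInputStream("conf/airavata-server.properties")) {
                props.load(in);
            }

            // Keys added or relied on by the Helix workflow managers in this patch
            String[] keys = {
                    "kafka.broker.url",
                    "kafka.broker.topic",
                    "kafka.broker.consumer.group",
                    "helix.cluster.name",
                    "pre.workflow.manager.name",
                    "post.workflow.manager.name",
                    "zookeeper.server.connection"
            };
            for (String key : keys) {
                System.out.println(key + " = " + props.getProperty(key, "<missing>"));
            }
        }
    }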
diff --git a/modules/airavata-helix-distribution/src/main/resources/conf/application.properties b/modules/airavata-helix-distribution/src/main/resources/conf/application.properties
new file mode 100644
index 0000000..b4b8048
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/conf/application.properties
@@ -0,0 +1,3 @@
+zookeeper.connection.url=localhost:2199
+helix.cluster.name=AiravataDemoCluster
+participant.name=all-p3
\ No newline at end of file
diff --git a/modules/airavata-helix-distribution/src/main/resources/conf/cred_store.jks b/modules/airavata-helix-distribution/src/main/resources/conf/cred_store.jks
new file mode 100644
index 0000000..aca0a04
Binary files /dev/null and b/modules/airavata-helix-distribution/src/main/resources/conf/cred_store.jks differ
diff --git a/modules/airavata-helix-distribution/src/main/resources/conf/log4j.properties b/modules/airavata-helix-distribution/src/main/resources/conf/log4j.properties
new file mode 100644
index 0000000..04aa72a
--- /dev/null
+++ b/modules/airavata-helix-distribution/src/main/resources/conf/log4j.properties
@@ -0,0 +1,13 @@
+# Set root logger level to DEBUG and its only appender to A1.
+log4j.rootLogger=INFO, A1
+
+log4j.category.org.apache.helix=WARN
+log4j.category.org.apache.zookeeper=WARN
+log4j.category.org.apache.airavata.helix.impl.task.submission.config.GroovyMapData=TRACE
+log4j.category.org.apache.airavata.helix.impl.task.submission.JobSubmissionTask=DEBUG
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] [E=%X{experiment},P=%X{process},T=%X{task},G=%X{gateway}] %-5p %c %x - %m%n
\ No newline at end of file
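
The %X{experiment}, %X{process}, %X{task} and %X{gateway} conversions in the pattern above are log4j MDC lookups, so they only render when a component has populated those keys. A minimal, self-contained sketch of populating and clearing them (the identifier values are placeholders):

    import org.apache.log4j.Logger;
    import org.apache.log4j.MDC;

    public class MdcLoggingSketch {
        private static final Logger logger = Logger.getLogger(MdcLoggingSketch.class);

        public static void main(String[] args) {
            // Keys match the %X{...} placeholders in the ConversionPattern above
            MDC.put("experiment", "EXPERIMENT_example");
            MDC.put("process", "PROCESS_example");
            MDC.put("task", "TASK_example");
            MDC.put("gateway", "example-gateway");
            try {
                logger.info("Submitting job for the current task");
            } finally {
                // Clear the context so the identifiers do not leak into unrelated log lines
                MDC.remove("experiment");
                MDC.remove("process");
                MDC.remove("task");
                MDC.remove("gateway");
            }
        }
    }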
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
index 503f5ca..a7e5a64 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
+++ b/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/participant/HelixParticipant.java
@@ -20,6 +20,7 @@ import org.apache.helix.task.TaskStateModelFactory;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
@@ -43,12 +44,16 @@ public class HelixParticipant <T extends AbstractTask> implements Runnable {
     private PropertyResolver propertyResolver;
     private Class<T> taskClass;
 
-    public HelixParticipant(String propertyFile, Class<T> taskClass, String taskTypeName) throws IOException {
+    public HelixParticipant(String propertyFile, Class<T> taskClass, String taskTypeName, boolean readPropertyFromFile) throws IOException {
 
         logger.info("Initializing Participant Node");
 
         this.propertyResolver = new PropertyResolver();
-        propertyResolver.loadInputStream(this.getClass().getClassLoader().getResourceAsStream(propertyFile));
+        if (readPropertyFromFile) {
+            propertyResolver.loadFromFile(new File(propertyFile));
+        } else {
+            propertyResolver.loadInputStream(this.getClass().getClassLoader().getResourceAsStream(propertyFile));
+        }
 
         this.zkAddress = propertyResolver.get("zookeeper.connection.url");
         this.clusterName = propertyResolver.get("helix.cluster.name");
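
The new readPropertyFromFile flag only changes where PropertyResolver reads the participant configuration from. A minimal sketch of the two loading paths, assuming a hypothetical conf/ location for the on-disk case:

    import org.apache.airavata.helix.core.util.PropertyResolver;

    import java.io.File;

    public class PropertySourceSketch {
        public static void main(String[] args) throws Exception {
            // readPropertyFromFile = true: resolve the name as a file under the conf directory
            PropertyResolver fromFile = new PropertyResolver();
            fromFile.loadFromFile(new File("conf/application.properties")); // hypothetical on-disk location

            // readPropertyFromFile = false: resolve the same name from the classpath, as before this patch
            PropertyResolver fromClasspath = new PropertyResolver();
            fromClasspath.loadInputStream(
                    PropertySourceSketch.class.getClassLoader().getResourceAsStream("application.properties"));

            System.out.println("zookeeper.connection.url = " + fromFile.get("zookeeper.connection.url"));
        }
    }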
diff --git a/modules/helix-spectator/pom.xml b/modules/helix-spectator/pom.xml
index 326d7ef..5c36d2c 100644
--- a/modules/helix-spectator/pom.xml
+++ b/modules/helix-spectator/pom.xml
@@ -53,7 +53,7 @@
         <dependency>
             <groupId>org.apache.kafka</groupId>
             <artifactId>kafka-clients</artifactId>
-            <version>1.0.0</version>
+            <version>${kafka-clients.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.airavata</groupId>
diff --git a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/controller/HelixController.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
similarity index 64%
rename from modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/controller/HelixController.java
rename to modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
index cdc27f7..11d7129 100644
--- a/modules/airavata-helix/task-core/src/main/java/org/apache/airavata/helix/core/controller/HelixController.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/controller/HelixController.java
@@ -1,10 +1,11 @@
-package org.apache.airavata.helix.core.controller;
+package org.apache.airavata.helix.impl.controller;
 
 import org.apache.airavata.helix.core.util.PropertyResolver;
 import org.apache.helix.controller.HelixControllerMain;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
 
@@ -26,10 +27,14 @@ public class HelixController implements Runnable {
     private CountDownLatch startLatch = new CountDownLatch(1);
     private CountDownLatch stopLatch = new CountDownLatch(1);
 
-    public HelixController(String propertyFile) throws IOException {
+    public HelixController(String propertyFile, boolean readPropertyFromFile) throws IOException {
 
         PropertyResolver propertyResolver = new PropertyResolver();
-        propertyResolver.loadInputStream(this.getClass().getClassLoader().getResourceAsStream(propertyFile));
+        if (readPropertyFromFile) {
+            propertyResolver.loadFromFile(new File(propertyFile));
+        } else {
+            propertyResolver.loadInputStream(this.getClass().getClassLoader().getResourceAsStream(propertyFile));
+        }
 
         this.clusterName = propertyResolver.get("helix.cluster.name");
         this.controllerName = propertyResolver.get("helix.controller.name");
@@ -38,6 +43,9 @@ public class HelixController implements Runnable {
 
     public void run() {
         try {
+            logger.info("Connecting to helix cluster " + clusterName + " as controller " + controllerName);
+            logger.info("Zookeeper connection string " + zkAddress);
+
             zkHelixManager = HelixControllerMain.startHelixController(zkAddress, clusterName,
                     controllerName, HelixControllerMain.STANDALONE);
             startLatch.countDown();
@@ -82,10 +90,32 @@ public class HelixController implements Runnable {
 
     public static void main(String args[]) {
         try {
-            HelixController helixController = new HelixController("application.properties");
+
+            logger.info("Starting helix controller");
+            String confDir = null;
+            if (args != null) {
+                for (String arg : args) {
+                    if (arg.startsWith("--confDir=")) {
+                        confDir = arg.substring("--confDir=".length());
+                    }
+                }
+            }
+
+            String propertiesFile = "application.properties";
+            boolean readPropertyFromFile = false;
+
+            if (confDir != null && !confDir.isEmpty()) {
+                propertiesFile = confDir.endsWith(File.separator)? confDir + propertiesFile : confDir + File.separator + propertiesFile;
+                readPropertyFromFile = true;
+            }
+
+            logger.info("Using configuration file " + propertiesFile);
+
+            HelixController helixController = new HelixController(propertiesFile, readPropertyFromFile);
             helixController.start();
+
         } catch (IOException e) {
-            e.printStackTrace();
+            logger.error("Failed to start the helix controller", e);
         }
     }
 }
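
For reference, the --confDir= handling introduced above can be distilled into a small standalone sketch. This is only an illustration of the argument parsing and the file-vs-classpath decision, not the Airavata code itself; the class name ConfDirExample is hypothetical, and the PropertyResolver calls are replaced by a println so the snippet runs on its own.

    // Minimal sketch of the --confDir= convention used by HelixController above.
    import java.io.File;

    public class ConfDirExample {
        public static void main(String[] args) {
            String confDir = null;
            for (String arg : args) {
                if (arg.startsWith("--confDir=")) {
                    confDir = arg.substring("--confDir=".length());
                }
            }

            String propertiesFile = "application.properties";
            boolean readFromFile = confDir != null && !confDir.isEmpty();
            if (readFromFile) {
                // Resolve application.properties inside the configuration directory.
                propertiesFile = new File(confDir, propertiesFile).getPath();
            }
            System.out.println("Using configuration file " + propertiesFile
                    + (readFromFile ? " (file system)" : " (classpath)"));
        }
    }

Running it with, for example, java ConfDirExample --confDir=/opt/airavata/conf exercises the file-system branch; with no arguments it falls back to the classpath resource, mirroring the controller's behaviour.
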
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
index 4849934..7dd5c99 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/participant/GlobalParticipant.java
@@ -7,13 +7,18 @@ import org.apache.airavata.helix.task.api.annotation.TaskDef;
 import org.apache.helix.task.Task;
 import org.apache.helix.task.TaskCallbackContext;
 import org.apache.helix.task.TaskFactory;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
 public class GlobalParticipant extends HelixParticipant {
 
+    private static final Logger logger = LogManager.getLogger(GlobalParticipant.class);
+
     private String[] taskClasses = {
         "org.apache.airavata.helix.impl.task.env.EnvSetupTask",
         "org.apache.airavata.helix.impl.task.staging.InputDataStagingTask",
@@ -52,17 +57,35 @@ public class GlobalParticipant extends HelixParticipant {
                 e.printStackTrace();
             }
         }
-
-
         return taskRegistry;
     }
 
-    public GlobalParticipant(String propertyFile, Class taskClass, String taskTypeName) throws IOException {
-        super(propertyFile, taskClass, taskTypeName);
+    public GlobalParticipant(String propertyFile, Class taskClass, String taskTypeName, boolean readPropertyFromFile) throws IOException {
+        super(propertyFile, taskClass, taskTypeName, readPropertyFromFile);
     }
 
     public static void main(String args[]) throws IOException {
-        GlobalParticipant participant = new GlobalParticipant("application.properties", null, null);
+
+        String confDir = null;
+        if (args != null) {
+            for (String arg : args) {
+                if (arg.startsWith("--confDir=")) {
+                    confDir = arg.substring("--confDir=".length());
+                }
+            }
+        }
+
+        String propertiesFile = "application.properties";
+        boolean readPropertyFromFile = false;
+
+        if (confDir != null && !confDir.isEmpty()) {
+            propertiesFile = confDir.endsWith(File.separator)? confDir + propertiesFile : confDir + File.separator + propertiesFile;
+            readPropertyFromFile = true;
+        }
+
+        logger.info("Using configuration file " + propertiesFile);
+
+        GlobalParticipant participant = new GlobalParticipant(propertiesFile, null, null, readPropertyFromFile);
         Thread t = new Thread(participant);
         t.start();
     }
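
The participant above registers a fixed list of task classes with Helix. A rough sketch of that registration pattern, using only the org.apache.helix.task.TaskFactory interface and plain reflection, is shown below; the class and method names here are illustrative, and the real GlobalParticipant may key the registry off its @TaskDef annotation rather than the simple class name.

    // Illustrative Helix task registry built by reflection (not the Airavata code).
    import org.apache.helix.task.Task;
    import org.apache.helix.task.TaskCallbackContext;
    import org.apache.helix.task.TaskFactory;

    import java.util.HashMap;
    import java.util.Map;

    public class TaskRegistryExample {

        public static Map<String, TaskFactory> buildRegistry(String[] taskClassNames) {
            Map<String, TaskFactory> registry = new HashMap<>();
            for (String className : taskClassNames) {
                TaskFactory factory = new TaskFactory() {
                    @Override
                    public Task createNewTask(TaskCallbackContext context) {
                        try {
                            // Each callback creates a fresh task instance for Helix to run.
                            return (Task) Class.forName(className)
                                    .getDeclaredConstructor().newInstance();
                        } catch (Exception e) {
                            throw new RuntimeException("Cannot instantiate task " + className, e);
                        }
                    }
                };
                // Registered under the simple class name for illustration only.
                registry.put(className.substring(className.lastIndexOf('.') + 1), factory);
            }
            return registry;
        }

        public static void main(String[] args) {
            // Any fully qualified class implementing org.apache.helix.task.Task could be listed here.
            System.out.println(buildRegistry(new String[]{}).size() + " factories registered");
        }
    }
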
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
index 196a219..1b24d77 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/TaskOnFailException.java
@@ -2,7 +2,6 @@ package org.apache.airavata.helix.impl.task;
 
 public class TaskOnFailException extends Exception {
 
-
     private String reason;
     private boolean critical;
     private Throwable e;
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
index 5414a46..8c19763 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/task/submission/config/GroovyMapData.java
@@ -447,14 +447,14 @@ public class GroovyMapData {
             String error = "Template file '" + templateName + "' not found";
             throw new Exception(error);
         }
-        File template = new File(templateUrl.getPath());
+        //File template = new File(templateUrl.getPath());
         TemplateEngine engine = new GStringTemplateEngine();
         Writable make;
         try {
 
-            make = engine.createTemplate(template).make(toImmutableMap());
+            make = engine.createTemplate(templateUrl).make(toImmutableMap());
         } catch (Exception e) {
-            throw new Exception("Error while generating script using groovy map");
+            throw new Exception("Error while generating script using groovy map for template " + templateUrl.getPath(), e);
         }
 
         if (logger.isTraceEnabled()) {
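
The GroovyMapData change above hands the template URL straight to the engine instead of wrapping it in a java.io.File, which avoids the File-from-URL conversion that fails for resources packaged inside a jar. A minimal sketch of that idiom with groovy.text.GStringTemplateEngine follows; the template name job.sh.template and the jobName binding are hypothetical, and the snippet needs groovy (groovy-all or groovy-templates) on the classpath.

    // Render a classpath template directly from its URL (works inside jars too).
    import groovy.lang.Writable;
    import groovy.text.GStringTemplateEngine;
    import groovy.text.TemplateEngine;

    import java.net.URL;
    import java.util.HashMap;
    import java.util.Map;

    public class TemplateExample {

        public static String render(String templateName, Map<String, Object> bindings) throws Exception {
            URL templateUrl = TemplateExample.class.getClassLoader().getResource(templateName);
            if (templateUrl == null) {
                throw new Exception("Template file '" + templateName + "' not found");
            }
            TemplateEngine engine = new GStringTemplateEngine();
            Writable made = engine.createTemplate(templateUrl).make(bindings);
            return made.toString();
        }

        public static void main(String[] args) throws Exception {
            Map<String, Object> bindings = new HashMap<>();
            bindings.put("jobName", "demo-job");                      // hypothetical binding
            System.out.println(render("job.sh.template", bindings));  // hypothetical template
        }
    }
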
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
index b0de43a..225f81d 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PostWorkflowManager.java
@@ -47,8 +47,8 @@ public class PostWorkflowManager {
 
     private static final Logger logger = LogManager.getLogger(PostWorkflowManager.class);
 
-    private final String BOOTSTRAP_SERVERS = "localhost:9092";
-    private final String TOPIC = "parsed-data";
+    //private final String BOOTSTRAP_SERVERS = "localhost:9092";
+    //private final String TOPIC = "parsed-data";
 
     private CuratorFramework curatorClient = null;
     private Publisher statusPublisher;
@@ -59,16 +59,16 @@ public class PostWorkflowManager {
         this.curatorClient.start();
     }
 
-    private Consumer<String, JobStatusResult> createConsumer() {
+    private Consumer<String, JobStatusResult> createConsumer() throws ApplicationSettingsException {
         final Properties props = new Properties();
-        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
-        props.put(ConsumerConfig.GROUP_ID_CONFIG, "MonitoringConsumer");
+        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, ServerSettings.getSetting("kafka.broker.url"));
+        props.put(ConsumerConfig.GROUP_ID_CONFIG, ServerSettings.getSetting("kafka.broker.consumer.group"));
         props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
         props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JobStatusResultDeserializer.class.getName());
         // Create the consumer using props.
         final Consumer<String, JobStatusResult> consumer = new KafkaConsumer<String, JobStatusResult>(props);
         // Subscribe to the topic.
-        consumer.subscribe(Collections.singletonList(TOPIC));
+        consumer.subscribe(Collections.singletonList(ServerSettings.getSetting("kafka.broker.topic")));
         return consumer;
     }
 
@@ -184,8 +184,10 @@ public class PostWorkflowManager {
                         }
                         allTasks.add(completingTask);
 
-                        WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster",
-                                "wm-23", ServerSettings.getZookeeperConnection());
+                        WorkflowManager workflowManager = new WorkflowManager(
+                                ServerSettings.getSetting("helix.cluster.name"),
+                                ServerSettings.getSetting("post.workflow.manager.name"),
+                                ServerSettings.getZookeeperConnection());
 
                         workflowManager.launchWorkflow(processId + "-POST-" + UUID.randomUUID().toString(),
                                 allTasks.stream().map(t -> (AiravataTask) t).collect(Collectors.toList()), true, false);
@@ -209,7 +211,7 @@ public class PostWorkflowManager {
         }
     }
 
-    private void runConsumer() throws InterruptedException {
+    private void runConsumer() throws ApplicationSettingsException {
         final Consumer<String, JobStatusResult> consumer = createConsumer();
 
         while (true) {
@@ -269,55 +271,5 @@ public class PostWorkflowManager {
         PostWorkflowManager postManager = new PostWorkflowManager();
         postManager.init();
         postManager.runConsumer();
-        /*
-        String processId = "PROCESS_5b252ad9-d630-4cf9-80e3-0c30c55d1001";
-        ExperimentCatalog experimentCatalog = RegistryFactory.getDefaultExpCatalog();
-
-        ProcessModel processModel = (ProcessModel) experimentCatalog.get(ExperimentCatalogModelType.PROCESS, processId);
-        ExperimentModel experimentModel = (ExperimentModel) experimentCatalog.get(ExperimentCatalogModelType.EXPERIMENT, processModel.getExperimentId());
-        String taskDag = processModel.getTaskDag();
-        List<TaskModel> taskList = processModel.getTasks();
-
-        String[] taskIds = taskDag.split(",");
-        final List<AiravataTask> allTasks = new ArrayList<>();
-
-        boolean jobSubmissionFound = false;
-
-        for (String taskId : taskIds) {
-            Optional<TaskModel> model = taskList.stream().filter(taskModel -> taskModel.getTaskId().equals(taskId)).findFirst();
-
-            if (model.isPresent()) {
-                TaskModel taskModel = model.get();
-                AiravataTask airavataTask = null;
-                if (taskModel.getTaskType() == TaskTypes.ENV_SETUP) {
-                    //airavataTask = new EnvSetupTask();
-                } else if (taskModel.getTaskType() == TaskTypes.JOB_SUBMISSION) {
-                    //airavataTask = new DefaultJobSubmissionTask();
-                    //airavataTask.setRetryCount(1);
-                    jobSubmissionFound = true;
-                } else if (taskModel.getTaskType() == TaskTypes.DATA_STAGING) {
-                    if (jobSubmissionFound) {
-                        airavataTask = new OutputDataStagingTask();
-                    } else {
-                        //airavataTask = new InputDataStagingTask();
-                    }
-                }
-
-                if (airavataTask != null) {
-                    airavataTask.setGatewayId(experimentModel.getGatewayId());
-                    airavataTask.setExperimentId(experimentModel.getExperimentId());
-                    airavataTask.setProcessId(processModel.getProcessId());
-                    airavataTask.setTaskId(taskModel.getTaskId());
-                    if (allTasks.size() > 0) {
-                        allTasks.get(allTasks.size() -1).setNextTask(new OutPort(airavataTask.getTaskId(), airavataTask));
-                    }
-                    allTasks.add(airavataTask);
-                }
-            }
-        }
-
-        WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster", "wm-22", "localhost:2199");
-        workflowManager.launchWorkflow(UUID.randomUUID().toString(), allTasks.stream().map(t -> (AiravataTask)t).collect(Collectors.toList()), true);
-        */
     }
 }
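
The consumer above now pulls its broker, group, and topic from configuration instead of hard-coded constants. A simplified, self-contained version of that wiring is sketched below; the literal values stand in for the ServerSettings.getSetting(...) lookups, and Airavata's JobStatusResultDeserializer is swapped for a plain StringDeserializer so the example compiles against kafka-clients alone.

    // Kafka monitoring consumer wired from configuration values (sketch).
    import org.apache.kafka.clients.consumer.Consumer;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    import java.util.Collections;
    import java.util.Properties;

    public class MonitoringConsumerExample {
        public static void main(String[] args) {
            Properties props = new Properties();
            // In Airavata these values come from kafka.broker.url,
            // kafka.broker.consumer.group and kafka.broker.topic in airavata-server.properties.
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "MonitoringConsumer");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

            try (Consumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList("parsed-data"));
                // Single poll for illustration; the real manager loops forever.
                ConsumerRecords<String, String> records = consumer.poll(1000);
                records.forEach(r -> System.out.println(r.key() + " -> " + r.value()));
            }
        }
    }
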
diff --git a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
index ac29c9d..18a6627 100644
--- a/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
+++ b/modules/helix-spectator/src/main/java/org/apache/airavata/helix/impl/workflow/PreWorkflowManager.java
@@ -42,15 +42,6 @@ public class PreWorkflowManager {
         this.subscriber = MessagingFactory.getSubscriber(new ProcessLaunchMessageHandler(), routingKeys, Type.PROCESS_LAUNCH);
     }
 
-    public static void main(String[] args) throws Exception {
-
-        PreWorkflowManager preWorkflowManager = new PreWorkflowManager();
-
-        //String processId = "PROCESS_5b252ad9-d630-4cf9-80e3-0c30c55d1001";
-        //AppCatalog appCatalog = RegistryFactory.getAppCatalog();
-
-    }
-
     private String createAndLaunchPreWorkflow(String processId, String gateway) throws Exception {
 
         ExperimentCatalog experimentCatalog = RegistryFactory.getExperimentCatalog(gateway);
@@ -98,13 +89,19 @@ public class PreWorkflowManager {
             }
         }
 
-        WorkflowManager workflowManager = new WorkflowManager("AiravataDemoCluster", "wm-22",
+        WorkflowManager workflowManager = new WorkflowManager(
+                ServerSettings.getSetting("helix.cluster.name"),
+                ServerSettings.getSetting("post.workflow.manager.name"),
                 ServerSettings.getZookeeperConnection());
         String workflowName = workflowManager.launchWorkflow(processId + "-PRE-" + UUID.randomUUID().toString(),
                 allTasks.stream().map(t -> (AiravataTask) t).collect(Collectors.toList()), true, false);
         return workflowName;
     }
 
+    public static void main(String[] args) throws Exception {
+        PreWorkflowManager preWorkflowManager = new PreWorkflowManager();
+    }
+
     private class ProcessLaunchMessageHandler implements MessageHandler {
 
         @Override
diff --git a/modules/helix-spectator/src/main/resources/airavata-server.properties b/modules/helix-spectator/src/main/resources/airavata-server.properties
index b54b28c..19b3b3d 100644
--- a/modules/helix-spectator/src/main/resources/airavata-server.properties
+++ b/modules/helix-spectator/src/main/resources/airavata-server.properties
@@ -260,13 +260,24 @@ email.based.monitor.store.protocol=imaps
 email.based.monitoring.period=10000
 
 ###########################################################################
+#Helix workflow manager configurations
+###########################################################################
+
+kafka.broker.url=localhost:9092
+kafka.broker.topic=parsed-data
+kafka.broker.consumer.group=MonitoringConsumer
+helix.cluster.name=AiravataDemoCluster
+pre.workflow.manager.name=prewm
+post.workflow.manager.name=postwm
+
+###########################################################################
 # AMQP Notification Configuration
 ###########################################################################
 #for simple scenarios we can use the guest user
 #rabbitmq.broker.url=amqp://localhost:5672
 #for production scenarios, give url as amqp://userName:password@hostName:portNumber/virtualHost, create user, virtualhost
 # and give permissions, refer: http://blog.dtzq.com/2012/06/rabbitmq-users-and-virtual-hosts.html
-rabbitmq.broker.url=amqp://airavata:123456@192.168.99.102:5672/master
+rabbitmq.broker.url=amqp://rabbit:rabbit123456@149.165.168.248:5672/master
 rabbitmq.status.exchange.name=status_exchange
 rabbitmq.process.exchange.name=process_exchange
 rabbitmq.experiment.exchange.name=experiment_exchange
@@ -279,7 +290,7 @@ experiment.launch..queue.name=experiment.launch.queue
 # Zookeeper Server Configuration
 ###########################################################################
 embedded.zk=false
-zookeeper.server.connection=192.168.99.102:2181
+zookeeper.server.connection=localhost:2199
 zookeeper.timeout=30000
 
 ########################################################################
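
For a quick sanity check of the configuration block added above, the new keys can be read with nothing more than java.util.Properties. Airavata itself resolves them through ServerSettings, which layers in defaults and overrides; the following standalone sketch assumes an airavata-server.properties file in the working directory and simply prints the values the workflow managers and the Kafka consumer rely on.

    // Standalone check of the new Helix/Kafka settings (sketch, not Airavata code).
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class SettingsCheck {
        public static void main(String[] args) throws IOException {
            Properties props = new Properties();
            try (FileInputStream in = new FileInputStream("airavata-server.properties")) {
                props.load(in);
            }
            System.out.println("helix.cluster.name          = " + props.getProperty("helix.cluster.name"));
            System.out.println("pre.workflow.manager.name   = " + props.getProperty("pre.workflow.manager.name"));
            System.out.println("post.workflow.manager.name  = " + props.getProperty("post.workflow.manager.name"));
            System.out.println("kafka.broker.url            = " + props.getProperty("kafka.broker.url"));
            System.out.println("kafka.broker.topic          = " + props.getProperty("kafka.broker.topic"));
            System.out.println("kafka.broker.consumer.group = " + props.getProperty("kafka.broker.consumer.group"));
        }
    }
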
diff --git a/pom.xml b/pom.xml
index c8e72db..834afe7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -30,6 +30,7 @@
     </prerequisites>
     <modules>
         <module>modules/job-monitor</module>
+        <module>modules/airavata-helix-distribution</module>
     </modules>
 
     <parent>
@@ -115,7 +116,7 @@
         <snakeyaml.version>1.15</snakeyaml.version>
         <maven.javadoc.failOnError>false</maven.javadoc.failOnError>
         <maven.replacer.plugin.version>1.5.3</maven.replacer.plugin.version>
-        <kafka-clients.version>0.8.2.2</kafka-clients.version>
+        <kafka-clients.version>1.0.0</kafka-clients.version>
         <logback.version>1.1.6</logback.version>
         <json.version>20160212</json.version>
         <commons.io.version>2.4</commons.io.version>
@@ -701,6 +702,7 @@
                 <module>modules/compute-account-provisioning</module>
                 <module>modules/airavata-helix</module>
                 <module>modules/helix-spectator</module>
+                <module>modules/airavata-helix-distribution</module>
             </modules>
         </profile>
         <profile>


[airavata] 16/17: Moving helix-spectator module to airavata-helix module

Posted by di...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch helix-integration
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 005341372d92414a0e5589ec039cd209d32de030
Author: dimuthu <di...@gmail.com>
AuthorDate: Wed Mar 7 14:09:58 2018 -0500

    Moving helix-spectator module to airavata-helix module
---
 modules/airavata-helix/helix-spectator/pom.xml | 4 ++--
 modules/airavata-helix/pom.xml                 | 1 +
 pom.xml                                        | 1 -
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/modules/airavata-helix/helix-spectator/pom.xml b/modules/airavata-helix/helix-spectator/pom.xml
index 5c36d2c..5b3605a 100644
--- a/modules/airavata-helix/helix-spectator/pom.xml
+++ b/modules/airavata-helix/helix-spectator/pom.xml
@@ -3,10 +3,10 @@
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
-        <artifactId>airavata</artifactId>
+        <artifactId>airavata-helix</artifactId>
         <groupId>org.apache.airavata</groupId>
         <version>0.17-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
+        <relativePath>../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
diff --git a/modules/airavata-helix/pom.xml b/modules/airavata-helix/pom.xml
index 05938fd..2dc6893 100644
--- a/modules/airavata-helix/pom.xml
+++ b/modules/airavata-helix/pom.xml
@@ -19,6 +19,7 @@
         <module>task-api</module>
         <module>task-core</module>
         <module>workflow-impl</module>
+        <module>helix-spectator</module>
     </modules>
 
 </project>
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 834afe7..8ccb992 100644
--- a/pom.xml
+++ b/pom.xml
@@ -701,7 +701,6 @@
                 <!--<module>modules/test-suite</module>-->
                 <module>modules/compute-account-provisioning</module>
                 <module>modules/airavata-helix</module>
-                <module>modules/helix-spectator</module>
                 <module>modules/airavata-helix-distribution</module>
             </modules>
         </profile>
