Posted to dev@ariatosca.apache.org by mx...@apache.org on 2017/06/20 16:14:47 UTC

[1/4] incubator-ariatosca git commit: ARIA-54 Prepare for ARIA packaging [Forced Update!]

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-278-Remove-core-tasks 3e119df50 -> 1fee85c41 (forced update)


ARIA-54 Prepare for ARIA packaging

Preparations for ARIA packaging:
 - Added CHANGELOG file
 - Added CONTRIBUTING file
 - Added DISCLAIMER file
 - Updated Makefile
 - Converted README from md to rst for PyPI compatibility
 - Removed outdated TODO file
 - Added long_description and download_url to setup.py metadata
 - Modified setup.py url metadata to point at the ASF domain
 - Added more badges to README


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/e6cf67ec
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/e6cf67ec
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/e6cf67ec

Branch: refs/heads/ARIA-278-Remove-core-tasks
Commit: e6cf67ec1230bf46febc11d65122ee3c0eeebc10
Parents: 2149a5e
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Mon Jun 5 13:24:49 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Thu Jun 15 12:03:59 2017 +0300

----------------------------------------------------------------------
 CHANGELOG.rst         |   4 ++
 CONTRIBUTING          |   3 +
 DISCLAIMER            |  10 ++++
 MANIFEST.in           |  10 +++-
 Makefile              |  56 ++++++++++++------
 README.md             | 120 --------------------------------------
 README.rst            | 140 +++++++++++++++++++++++++++++++++++++++++++++
 TODO.md               |   8 ---
 docs/conf.py          |   3 +-
 docs/requirements.txt |   4 +-
 setup.py              |   8 ++-
 11 files changed, 214 insertions(+), 152 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/CHANGELOG.rst
----------------------------------------------------------------------
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
new file mode 100644
index 0000000..6abb1af
--- /dev/null
+++ b/CHANGELOG.rst
@@ -0,0 +1,4 @@
+0.1.0
+-----
+
+ * Initial release.
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/CONTRIBUTING
----------------------------------------------------------------------
diff --git a/CONTRIBUTING b/CONTRIBUTING
new file mode 100644
index 0000000..4124003
--- /dev/null
+++ b/CONTRIBUTING
@@ -0,0 +1,3 @@
+Contribution guide is available on our Confluence:
+
+https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/DISCLAIMER
----------------------------------------------------------------------
diff --git a/DISCLAIMER b/DISCLAIMER
new file mode 100644
index 0000000..358d8e1
--- /dev/null
+++ b/DISCLAIMER
@@ -0,0 +1,10 @@
+Apache AriaTosca is an effort undergoing incubation at the Apache Software
+Foundation (ASF), sponsored by the Apache Incubator.
+
+Incubation is required of all newly accepted projects until a further review
+indicates that the infrastructure, communications, and decision making process
+have stabilized in a manner consistent with other successful ASF projects.
+
+While incubation status is not necessarily a reflection of the completeness
+or stability of the code, it does indicate that the project has yet to be
+fully endorsed by the ASF.
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/MANIFEST.in
----------------------------------------------------------------------
diff --git a/MANIFEST.in b/MANIFEST.in
index 6c79a3a..877a7dd 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,10 @@
-include requirements.txt
-include VERSION
+include CONTRIBUTING
 include LICENSE
+include NOTICE
+include VERSION
+include CHANGELOG.rst
+include README.rst
+include requirements.txt
+recursive-include docs/html *
 recursive-include examples *
+prune docs/html/.doctrees

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index 3bafd3b..a857ca7 100644
--- a/Makefile
+++ b/Makefile
@@ -13,34 +13,54 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-EXTENSIONS=extensions
-DOCS=docs
-HTML=docs/html
+EXTENSIONS = ./extensions
+DIST = ./dist
+DOCS = ./docs
+HTML = ./docs/html
+EASY_INSTALL_PTH = $(VIRTUAL_ENV)/lib/python2.7/site-packages/easy-install.pth
+PYTHON_VERSION = $$(python -V 2>&1 | cut -f2 -d' ' | cut -f1,2 -d'.' --output-delimiter='')
 
-.PHONY: clean aria-requirements docs-requirements docs
-.DEFAULT_GOAL = test
+.DEFAULT_GOAL = install
+.PHONY: clean install install-virtual docs test dist deploy
 
 clean:
-	rm -rf "$(HTML)" .tox .coverage*
+	rm -rf "$(DIST)" "$(HTML)" .tox .coverage*
 	-find . -type f -name '.coverage' -delete
 	-find . -type d -name '.coverage' -exec rm -rf {} \; 2>/dev/null
 	-find . -type d -name '*.egg-info' -exec rm -rf {} \; 2>/dev/null
 
 install:
-	pip install --upgrade .
+	pip install .
 
-requirements:
-	pip install --upgrade --requirement requirements.txt
+install-virtual:
+	pip install --editable .
+	
+	# "pip install --editable" will not add our extensions to the path, so we will patch the virtualenv
+	EXTENSIONS_PATH="$$(head -n 1 "$(EASY_INSTALL_PTH)")/extensions" && \
+	if ! grep -Fxq "$$EXTENSIONS_PATH" "$(EASY_INSTALL_PTH)"; then \
+		echo "$$EXTENSIONS_PATH" >> "$(EASY_INSTALL_PTH)"; \
+	fi
 
-docs-requirements:
-	pip install --upgrade --requirement "$(DOCS)/requirements.txt"
-
-test-requirements:
-	pip install tox==2.5.0
-
-docs: docs-requirements requirements
+docs:
+	pip install --requirement "$(DOCS)/requirements.txt"
 	rm -rf "$(HTML)"
 	sphinx-build -b html "$(DOCS)" "$(HTML)"
 
-test: test-requirements requirements
-	PYTHONPATH="$(EXTENSIONS):$(PYTHONPATH)" tox
+test:
+	pip install --upgrade "tox>=2.7.0"
+	tox -e pylint_code
+	tox -e pylint_tests
+	tox -e py$(PYTHON_VERSION)
+	tox -e py$(PYTHON_VERSION)e2e
+
+dist: docs
+	python ./setup.py sdist bdist_wheel
+
+deploy:
+	pip install --upgrade "twine>=1.9.1"
+	gpg --detach-sign -a "$(DIST)"/*
+	twine upload "$(DIST)"/*
+
+./requirements.txt: ./requirements.in
+	pip install --upgrade "pip-tools>=1.9.0"
+	pip-compile --output-file ./requirements.txt ./requirements.in
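
The install-virtual target above patches the virtualenv's easy-install.pth by hand, since
"pip install --editable" only adds the project root to the path. A minimal Python sketch of
the same idempotent append (paths assumed, mirroring the Makefile's EASY_INSTALL_PTH):

    import os

    # Assumed layout, mirroring EASY_INSTALL_PTH in the Makefile above.
    pth_file = os.path.join(os.environ['VIRTUAL_ENV'],
                            'lib', 'python2.7', 'site-packages',
                            'easy-install.pth')

    with open(pth_file) as f:
        lines = [line.rstrip('\n') for line in f]

    # The first .pth entry is the project root that --editable wrote;
    # the extensions directory lives beneath it.
    extensions_path = os.path.join(lines[0], 'extensions')

    # Append only if missing, like the grep -Fxq guard in the Makefile.
    if extensions_path not in lines:
        with open(pth_file, 'a') as f:
            f.write(extensions_path + '\n')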

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
deleted file mode 100644
index 6aee414..0000000
--- a/README.md
+++ /dev/null
@@ -1,120 +0,0 @@
-ARIA
-====
-
-[![Build Status](https://img.shields.io/travis/apache/incubator-ariatosca/master.svg)](https://travis-ci.org/apache/incubator-ariatosca)
-[![Appveyor Build Status](https://img.shields.io/appveyor/ci/ApacheSoftwareFoundation/incubator-ariatosca/master.svg)](https://ci.appveyor.com/project/ApacheSoftwareFoundation/incubator-ariatosca/history)
-[![License](https://img.shields.io/github/license/apache/incubator-ariatosca.svg)](http://www.apache.org/licenses/LICENSE-2.0)
-[![PyPI release](https://img.shields.io/pypi/v/ariatosca.svg)](https://pypi.python.org/pypi/ariatosca)
-![Python Versions](https://img.shields.io/pypi/pyversions/ariatosca.svg)
-![Wheel](https://img.shields.io/pypi/wheel/ariatosca.svg)
-![Contributors](https://img.shields.io/github/contributors/apache/incubator-ariatosca.svg)
-[![Open Pull Requests](https://img.shields.io/github/issues-pr/apache/incubator-ariatosca.svg)](https://github.com/apache/incubator-ariatosca/pulls)
-[![Closed Pull Requests](https://img.shields.io/github/issues-pr-closed-raw/apache/incubator-ariatosca.svg)](https://github.com/apache/incubator-ariatosca/pulls?q=is%3Apr+is%3Aclosed)
-
-
-What is ARIA?
-----------------
-
-[ARIA](http://ariatosca.incubator.apache.org/) is a an open-source, [TOSCA](https://www.oasis-open.org/committees/tosca/)-based, lightweight library and CLI for orchestration and for consumption by projects building TOSCA-based solutions for resources and services orchestration.
-
-ARIA can be utilized by any organization that wants to implement TOSCA-based orchestration in its solutions, whether a multi-cloud enterprise application, or an NFV or SDN solution for multiple virtual infrastructure managers.
-
-With ARIA, you can utilize TOSCA's cloud portability out-of-the-box, to develop, test and run your applications, from template to deployment.
-
-ARIA is an incubation project under the [Apache Software Foundation](https://www.apache.org/).
-
-
-Installation
-----------------
-
-ARIA is [available on PyPI](https://pypi.python.org/pypi/ariatosca).    
-
-To install ARIA directly from PyPI (using a `wheel`), use:
-
-    pip install aria
-
-
-To install ARIA from source, download the source tarball from [PyPI](https://pypi.python.org/pypi/ariatosca),
-extract it, and then when inside the extracted directory, use:
-
-    pip install .
-
-The source package comes along with relevant examples, documentation,
-`requirements.txt` (for installing specifically the frozen dependencies' versions with which ARIA was tested) and more.
-
-<br>
-Note that for the `pip install` commands mentioned above, you must use a privileged user, or use virtualenv.
-<br><br><br>
-
-ARIA itself is in a `wheel` format compatible with all platforms. 
-Some dependencies, however, might require compilation (based on a given platform), and therefore possibly some system dependencies are required as well.
-
-On Ubuntu or other Debian-based systems:
-
-	sudo apt install python-setuptools python-dev build-essential libssl-dev libffi-dev
-
-On Archlinux:
-
-	sudo pacman -S python-setuptools
-
-
-ARIA requires Python 2.6/2.7. Python 3+ is currently not supported.
-
-
-Getting Started
----------------
-
-This section will describe how to run a simple "Hello World" example.
-
-First, provide ARIA with the ARIA "hello world" service-template and name it (e.g. `my-service-template`):
-
-	aria service-templates store examples/hello-world/helloworld.yaml my-service-template
-	
-Now create a service based on this service-template and name it (e.g. `my-service`):
-	
-	aria services create my-service -t my-service-template
-	
-Finally, start an `install` workflow execution on `my-service` like so:
-
-	aria executions start install -s my-service
-
-<br>
-You should now have a simple web-server running on your local machine.
-You can try visiting http://localhost:9090 to view your deployed application.
-
-To uninstall and clean your environment, follow these steps:
-
-    aria executions start uninstall -s my-service
-    aria services delete my-service
-    aria service-templates delete my-service-template
-
-
-Contribution
-------------
-
-You are welcome and encouraged to participate and contribute to the ARIA project.
-
-Please see our guide to [Contributing to ARIA](https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA).
-
-Feel free to also provide feedback on the mailing lists (see [Resources](#user-content-resources) section).
-
-
-Resources
----------
-
-* [ARIA homepage](http://ariatosca.incubator.apache.org/)
-* [ARIA wiki](https://cwiki.apache.org/confluence/display/AriaTosca)
-* [Issue tracker](https://issues.apache.org/jira/browse/ARIA)
-
-* Dev mailing list: dev@ariatosca.incubator.apache.org
-* User mailing list: user@ariatosca.incubator.apache.org
-
-Subscribe by sending a mail to `<group>-subscribe@ariatosca.incubator.apache.org` (e.g. `dev-subscribe@ariatosca.incubator.apache.org`).
-See information on how to subscribe to mailing list [here](https://www.apache.org/foundation/mailinglists.html).
-
-For past correspondence, see the [dev mailing list archive](http://mail-archives.apache.org/mod_mbox/incubator-ariatosca-dev/).
-
-
-License
--------
-ARIA is licensed under the [Apache License 2.0](https://github.com/apache/incubator-ariatosca/blob/master/LICENSE).

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/README.rst
----------------------------------------------------------------------
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..8af13a5
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,140 @@
+ARIA
+====
+
+|Build Status| |Appveyor Build Status| |License| |PyPI release| |Python Versions| |Wheel|
+|Contributors| |Open Pull Requests| |Closed Pull Requests|
+
+
+What is ARIA?
+-------------
+
+`ARIA <http://ariatosca.incubator.apache.org/>`__ is an open-source,
+`TOSCA <https://www.oasis-open.org/committees/tosca/>`__-based, lightweight library and CLI for
+orchestration and for consumption by projects building TOSCA-based solutions for resources and
+services orchestration.
+
+ARIA can be utilized by any organization that wants to implement TOSCA-based orchestration in its
+solutions, whether a multi-cloud enterprise application, or an NFV or SDN solution for multiple
+virtual infrastructure managers.
+
+With ARIA, you can utilize TOSCA's cloud portability out-of-the-box, to develop, test and run your
+applications, from template to deployment.
+
+ARIA is an incubation project under the `Apache Software Foundation <https://www.apache.org/>`__.
+
+
+Installation
+------------
+
+ARIA is `available on PyPI <https://pypi.python.org/pypi/ariatosca>`__.
+
+To install ARIA directly from PyPI (using a ``wheel``), use::
+
+    pip install aria
+
+To install ARIA from source, download the source tarball from
+`PyPI <https://pypi.python.org/pypi/ariatosca>`__, extract it, and then when inside the extracted
+directory, use::
+
+    pip install .
+
+The source package comes along with relevant examples, documentation, ``requirements.txt`` (for
+installing the specific frozen dependency versions with which ARIA was tested) and more.
+
+Note that for the ``pip install`` commands mentioned above, you must use a privileged user, or use
+virtualenv.
+
+ARIA itself is in a ``wheel`` format compatible with all platforms. Some dependencies, however,
+might require compilation (based on a given platform), and therefore possibly some system
+dependencies are required as well.
+
+On Ubuntu or other Debian-based systems::
+
+    sudo apt install python-setuptools python-dev build-essential libssl-dev libffi-dev
+
+On Archlinux::
+
+    sudo pacman -S python-setuptools
+
+ARIA requires Python 2.6/2.7. Python 3+ is currently not supported.
+
+
+Getting Started
+---------------
+
+This section will describe how to run a simple "Hello World" example.
+
+First, provide ARIA with the ARIA "hello world" service-template and name it (e.g.
+``my-service-template``)::
+
+    aria service-templates store examples/hello-world/helloworld.yaml my-service-template
+
+Now create a service based on this service-template and name it (e.g. ``my-service``)::
+
+    aria services create my-service -t my-service-template
+
+Finally, start an ``install`` workflow execution on ``my-service`` like so::
+
+    aria executions start install -s my-service
+
+You should now have a simple web-server running on your local machine. You can try visiting
+``http://localhost:9090`` to view your deployed application.
+
+To uninstall and clean your environment, follow these steps::
+
+    aria executions start uninstall -s my-service
+    aria services delete my-service
+    aria service-templates delete my-service-template
+
+
+Contribution
+------------
+
+You are welcome and encouraged to participate and contribute to the ARIA project.
+
+Please see our guide to
+`Contributing to ARIA <https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA>`__.
+
+Feel free to also provide feedback on the mailing lists (see `Resources <#user-content-resources>`__
+section).
+
+
+Resources
+---------
+
+-  `ARIA homepage <http://ariatosca.incubator.apache.org/>`__
+-  `ARIA wiki <https://cwiki.apache.org/confluence/display/AriaTosca>`__
+-  `Issue tracker <https://issues.apache.org/jira/browse/ARIA>`__
+
+-  Dev mailing list: dev@ariatosca.incubator.apache.org
+-  User mailing list: user@ariatosca.incubator.apache.org
+
+Subscribe by sending a mail to ``<group>-subscribe@ariatosca.incubator.apache.org`` (e.g.
+``dev-subscribe@ariatosca.incubator.apache.org``). See information on how to subscribe to mailing
+lists `here <https://www.apache.org/foundation/mailinglists.html>`__.
+
+For past correspondence, see the
+`dev mailing list archive <http://mail-archives.apache.org/mod_mbox/incubator-ariatosca-dev/>`__.
+
+
+License
+-------
+
+ARIA is licensed under the
+`Apache License 2.0 <https://github.com/apache/incubator-ariatosca/blob/master/LICENSE>`__.
+
+.. |Build Status| image:: https://img.shields.io/travis/apache/incubator-ariatosca/master.svg
+   :target: https://travis-ci.org/apache/incubator-ariatosca
+.. |Appveyor Build Status| image:: https://img.shields.io/appveyor/ci/ApacheSoftwareFoundation/incubator-ariatosca/master.svg
+   :target: https://ci.appveyor.com/project/ApacheSoftwareFoundation/incubator-ariatosca/history
+.. |License| image:: https://img.shields.io/github/license/apache/incubator-ariatosca.svg
+   :target: http://www.apache.org/licenses/LICENSE-2.0
+.. |PyPI release| image:: https://img.shields.io/pypi/v/ariatosca.svg
+   :target: https://pypi.python.org/pypi/ariatosca
+.. |Python Versions| image:: https://img.shields.io/pypi/pyversions/ariatosca.svg
+.. |Wheel| image:: https://img.shields.io/pypi/wheel/ariatosca.svg
+.. |Contributors| image:: https://img.shields.io/github/contributors/apache/incubator-ariatosca.svg
+.. |Open Pull Requests| image:: https://img.shields.io/github/issues-pr/apache/incubator-ariatosca.svg
+   :target: https://github.com/apache/incubator-ariatosca/pulls
+.. |Closed Pull Requests| image:: https://img.shields.io/github/issues-pr-closed-raw/apache/incubator-ariatosca.svg
+   :target: https://github.com/apache/incubator-ariatosca/pulls?q=is%3Apr+is%3Aclosed

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/TODO.md
----------------------------------------------------------------------
diff --git a/TODO.md b/TODO.md
deleted file mode 100644
index ab32cf7..0000000
--- a/TODO.md
+++ /dev/null
@@ -1,8 +0,0 @@
-- aria/parser/extension_tools.py:66:    #  todo: maybe add replace action and check in add that we don't replace...
-- aria/parser/framework/elements/policies.py:128:    #  TODO: policies should be implemented according to TOSCA as generic types
-- aria/parser/framework/elements/node_templates.py:42:    #  TODO: Capabilities should be implemented according to TOSCA as generic types
-- aria/parser/framework/functions.py:25:    #  TODO: ugly huck for now..., sort the imports when you have time
-- aria/parser/framework/parser.py:258:    #  TODO: need to clean up
-- tests/parser/test_parser_api.py:430:    #  TODO: assert node-type's default and description values once
-- tests/parser/test_parser_api.py:450:    #  TODO: assert type's default and description values once 'type' is
-- tests/parser/test_parser_api.py:472:    #  TODO: assert type's default and description values once 'type' is

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/docs/conf.py
----------------------------------------------------------------------
diff --git a/docs/conf.py b/docs/conf.py
index cd25279..e557f02 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -36,7 +36,8 @@ import sys
 sys.path.append(os.path.abspath('../aria'))
 sys.path.append(os.path.abspath('../extensions'))
 
-execfile(os.path.join('../aria', 'VERSION.py'))
+with open('../VERSION') as f:
+    version = f.readline()
 
 # -- General configuration ------------------------------------------------
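
One caveat with the replacement above: readline() keeps the trailing newline, so the version
string may need stripping. A minimal variant of the same pattern:

    # Same idea, but without the trailing newline readline() would keep:
    with open('../VERSION') as f:
        version = f.read().strip()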
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/docs/requirements.txt
----------------------------------------------------------------------
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 72b28f1..976c5b6 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -10,5 +10,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-Sphinx==1.5.3
-sphinx_rtd_theme==0.2.4
+Sphinx>=1.6.2, <2.0.0
+sphinx_rtd_theme>=0.2.4, <1.0.0

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e6cf67ec/setup.py
----------------------------------------------------------------------
diff --git a/setup.py b/setup.py
index d43ce91..8d5f463 100644
--- a/setup.py
+++ b/setup.py
@@ -39,6 +39,9 @@ root_dir = os.path.dirname(__file__)
 with open(os.path.join(root_dir, 'VERSION')) as version_file:
     __version__ = version_file.read().strip()
 
+with open(os.path.join(root_dir, 'README.rst')) as readme:
+    long_description = readme.read()
+
 install_requires = []
 extras_require = {}
 
@@ -106,10 +109,13 @@ setup(
     name=_PACKAGE_NAME,
     version=__version__,
     description='ARIA',
+    long_description=long_description,
     license='Apache License 2.0',
     author='aria',
     author_email='dev@ariatosca.incubator.apache.org',
-    url='http://ariatosca.org',
+    url='http://ariatosca.incubator.apache.org/',
+    download_url=(
+        'https://dist.apache.org/repos/dist/release/incubator/ariatosca/' + __version__),
     classifiers=[
         'Development Status :: 4 - Beta',
         'Environment :: Console',
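
Condensed, the packaging metadata wired up by this commit looks roughly like the sketch below
(_PACKAGE_NAME is defined elsewhere in setup.py; the value here is only illustrative):

    import os
    from setuptools import setup

    _PACKAGE_NAME = 'ariatosca'  # illustrative; setup.py defines this elsewhere

    root_dir = os.path.dirname(__file__)

    with open(os.path.join(root_dir, 'VERSION')) as version_file:
        __version__ = version_file.read().strip()

    with open(os.path.join(root_dir, 'README.rst')) as readme:
        long_description = readme.read()  # rendered by PyPI as reStructuredText

    setup(
        name=_PACKAGE_NAME,
        version=__version__,
        description='ARIA',
        long_description=long_description,
        url='http://ariatosca.incubator.apache.org/',
        download_url=('https://dist.apache.org/repos/dist/release/incubator/ariatosca/'
                      + __version__),
    )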


[2/4] incubator-ariatosca git commit: ARIA-281 Update click library version

Posted by mx...@apache.org.
ARIA-281 Update click library version

Updated the click library to a newer version.
This also makes click compatible in the same environment with the
pip-tools library, which uses click too and requires the newer version.

Additional changes:
 - Removed PyYAML dependency from requirements.txt (leftover)
 - Disabled default goal of Makefile


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/99075202
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/99075202
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/99075202

Branch: refs/heads/ARIA-278-Remove-core-tasks
Commit: 990752026d3316286aff53b388eaa9999a47b804
Parents: e6cf67e
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Tue Jun 20 15:47:09 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Jun 20 15:47:09 2017 +0300

----------------------------------------------------------------------
 Makefile         | 5 ++++-
 requirements.in  | 2 +-
 requirements.txt | 5 ++---
 3 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/99075202/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index a857ca7..cb4b58f 100644
--- a/Makefile
+++ b/Makefile
@@ -20,9 +20,12 @@ HTML = ./docs/html
 EASY_INSTALL_PTH = $(VIRTUAL_ENV)/lib/python2.7/site-packages/easy-install.pth
 PYTHON_VERSION = $$(python -V 2>&1 | cut -f2 -d' ' | cut -f1,2 -d'.' --output-delimiter='')
 
-.DEFAULT_GOAL = install
+.DEFAULT_GOAL = default
 .PHONY: clean install install-virtual docs test dist deploy
 
+default:
+	@echo "Please choose one of the following targets: clean, install, install-virtual, docs, test, dist, deploy, requirements.txt"
+
 clean:
 	rm -rf "$(DIST)" "$(HTML)" .tox .coverage*
 	-find . -type f -name '.coverage' -delete

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/99075202/requirements.in
----------------------------------------------------------------------
diff --git a/requirements.in b/requirements.in
index 54e8714..d205c7a 100644
--- a/requirements.in
+++ b/requirements.in
@@ -28,7 +28,7 @@ wagon==0.6.0
 bottle>=0.12.0, <0.13
 Fabric>=1.13.0, <1.14
 setuptools>=35.0.0, <36.0.0
-click>=4.1, < 5.0
+click>=6.0, < 7.0
 colorama>=0.3.7, <=0.3.9
 PrettyTable>=0.7,<0.8
 click_didyoumean==0.0.3

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/99075202/requirements.txt
----------------------------------------------------------------------
diff --git a/requirements.txt b/requirements.txt
index 8551c65..6cf2ade 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@
 # This file is autogenerated by pip-compile
 # To update, run:
 #
-#    pip-compile --output-file requirements.txt requirements.in
+#    pip-compile --output-file ./requirements.txt ./requirements.in
 #
 # Since the tool we are using to generate our requirements.txt, `pip-tools`,
 # does not currently support conditional dependencies (;), we're adding our original
@@ -25,7 +25,7 @@ blinker==1.4
 bottle==0.12.13
 cachecontrol[filecache]==0.12.1
 cffi==1.10.0              # via cryptography
-click==4.1
+click==6.7
 click_didyoumean==0.0.3
 clint==0.5.1
 colorama==0.3.9
@@ -48,7 +48,6 @@ prettytable==0.7.2
 pyasn1==0.2.3             # via paramiko
 pycparser==2.17           # via cffi
 pyparsing==2.2.0          # via packaging
-pyyaml==3.12
 requests==2.13.0
 retrying==1.3.3
 ruamel.ordereddict==0.4.9  # via ruamel.yaml


[3/4] incubator-ariatosca git commit: ARIA-278 remove core tasks

Posted by mx...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
index 5dd2855..f5fb17a 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
@@ -13,12 +13,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from networkx import topological_sort, DiGraph
+from networkx import topological_sort
 
+from aria.modeling import models
 from aria.orchestrator import context
-from aria.orchestrator.workflows import api, core
+from aria.orchestrator.workflows import api
+from aria.orchestrator.workflows.core import compile
 from aria.orchestrator.workflows.executor import base
-
 from tests import mock
 from tests import storage
 
@@ -26,8 +27,8 @@ from tests import storage
 def test_task_graph_into_execution_graph(tmpdir):
     interface_name = 'Standard'
     operation_name = 'create'
-    task_context = mock.context.simple(str(tmpdir))
-    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    workflow_context = mock.context.simple(str(tmpdir))
+    node = workflow_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
         interface_name,
@@ -35,12 +36,12 @@ def test_task_graph_into_execution_graph(tmpdir):
         operation_kwargs=dict(function='test')
     )
     node.interfaces[interface.name] = interface
-    task_context.model.node.update(node)
+    workflow_context.model.node.update(node)
 
     def sub_workflow(name, **_):
         return api.task_graph.TaskGraph(name)
 
-    with context.workflow.current.push(task_context):
+    with context.workflow.current.push(workflow_context):
         test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
         simple_before_task = api.task.OperationTask(
             node,
@@ -64,12 +65,9 @@ def test_task_graph_into_execution_graph(tmpdir):
     test_task_graph.add_dependency(inner_task_graph, simple_before_task)
     test_task_graph.add_dependency(simple_after_task, inner_task_graph)
 
-    # Direct check
-    execution_graph = DiGraph()
-    core.translation.build_execution_graph(task_graph=test_task_graph,
-                                           execution_graph=execution_graph,
-                                           default_executor=base.StubTaskExecutor())
-    execution_tasks = topological_sort(execution_graph)
+    compile.create_execution_tasks(workflow_context, test_task_graph, base.StubTaskExecutor)
+
+    execution_tasks = topological_sort(workflow_context._graph)
 
     assert len(execution_tasks) == 7
 
@@ -83,30 +81,23 @@ def test_task_graph_into_execution_graph(tmpdir):
         '{0}-End'.format(test_task_graph.id)
     ]
 
-    assert expected_tasks_names == execution_tasks
-
-    assert isinstance(_get_task_by_name(execution_tasks[0], execution_graph),
-                      core.task.StartWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[1], execution_graph),
-                                  simple_before_task)
-    assert isinstance(_get_task_by_name(execution_tasks[2], execution_graph),
-                      core.task.StartSubWorkflowTask)
+    assert expected_tasks_names == [t._api_id for t in execution_tasks]
+    assert all(isinstance(task, models.Task) for task in execution_tasks)
+    execution_tasks = iter(execution_tasks)
 
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[3], execution_graph),
-                                  inner_task)
-    assert isinstance(_get_task_by_name(execution_tasks[4], execution_graph),
-                      core.task.EndSubWorkflowTask)
+    assert next(execution_tasks)._stub_type == models.Task.START_WORKFLOW
+    _assert_execution_is_api_task(next(execution_tasks), simple_before_task)
+    assert next(execution_tasks)._stub_type == models.Task.START_SUBWROFKLOW
+    _assert_execution_is_api_task(next(execution_tasks), inner_task)
+    assert next(execution_tasks)._stub_type == models.Task.END_SUBWORKFLOW
+    _assert_execution_is_api_task(next(execution_tasks), simple_after_task)
+    assert next(execution_tasks)._stub_type == models.Task.END_WORKFLOW
 
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[5], execution_graph),
-                                  simple_after_task)
-    assert isinstance(_get_task_by_name(execution_tasks[6], execution_graph),
-                      core.task.EndWorkflowTask)
-    storage.release_sqlite_storage(task_context.model)
+    storage.release_sqlite_storage(workflow_context.model)
 
 
 def _assert_execution_is_api_task(execution_task, api_task):
-    assert execution_task.id == api_task.id
+    assert execution_task._api_id == api_task.id
     assert execution_task.name == api_task.name
     assert execution_task.function == api_task.function
     assert execution_task.actor == api_task.actor

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/executor/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/__init__.py b/tests/orchestrator/workflows/executor/__init__.py
index ac6d325..83584a6 100644
--- a/tests/orchestrator/workflows/executor/__init__.py
+++ b/tests/orchestrator/workflows/executor/__init__.py
@@ -12,69 +12,80 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import uuid
 import logging
-from collections import namedtuple
+import uuid
 from contextlib import contextmanager
 
 import aria
 from aria.modeling import models
 
 
+class MockContext(object):
+
+    def __init__(self, storage, task_kwargs=None):
+        self.logger = logging.getLogger('mock_logger')
+        self._task_kwargs = task_kwargs or {}
+        self._storage = storage
+        self.task = MockTask(storage, **task_kwargs)
+        self.states = []
+        self.exception = None
+
+    @property
+    def serialization_dict(self):
+        return {
+            'context_cls': self.__class__,
+            'context': {
+                'storage_kwargs': self._storage.serialization_dict,
+                'task_kwargs': self._task_kwargs
+            }
+        }
+
+    def __getattr__(self, item):
+        return None
+
+    def close(self):
+        pass
+
+    @classmethod
+    def instantiate_from_dict(cls, storage_kwargs=None, task_kwargs=None):
+        return cls(storage=aria.application_model_storage(**(storage_kwargs or {})),
+                   task_kwargs=(task_kwargs or {}))
+
+    @property
+    @contextmanager
+    def persist_changes(self):
+        yield
+
+
+class MockActor(object):
+    def __init__(self):
+        self.name = 'actor_name'
+
+
 class MockTask(object):
 
     INFINITE_RETRIES = models.Task.INFINITE_RETRIES
 
-    def __init__(self, function, arguments=None, plugin=None, storage=None):
+    def __init__(self, model, function, arguments=None, plugin_fk=None):
         self.function = self.name = function
-        self.plugin_fk = plugin.id if plugin else None
-        self.plugin = plugin or None
+        self.plugin_fk = plugin_fk
         self.arguments = arguments or {}
         self.states = []
         self.exception = None
         self.id = str(uuid.uuid4())
         self.logger = logging.getLogger()
-        self.context = MockContext(storage)
         self.attempts_count = 1
         self.max_attempts = 1
         self.ignore_failure = False
         self.interface_name = 'interface_name'
         self.operation_name = 'operation_name'
-        self.actor = namedtuple('actor', 'name')(name='actor_name')
-        self.model_task = None
+        self.actor = MockActor()
+        self.node = self.actor
+        self.model = model
 
         for state in models.Task.STATES:
             setattr(self, state.upper(), state)
 
-    @contextmanager
-    def _update(self):
-        yield self
-
-
-class MockContext(object):
-
-    def __init__(self, storage=None):
-        self.logger = logging.getLogger('mock_logger')
-        self.task = type('SubprocessMockTask', (object, ), {'plugin': None})
-        self.model = storage
-
     @property
-    def serialization_dict(self):
-        if self.model:
-            return {'context': self.model.serialization_dict, 'context_cls': self.__class__}
-        else:
-            return {'context_cls': self.__class__, 'context': {}}
-
-    def __getattr__(self, item):
-        return None
-
-    @classmethod
-    def instantiate_from_dict(cls, **kwargs):
-        if kwargs:
-            return cls(storage=aria.application_model_storage(**kwargs))
-        else:
-            return cls()
-
-    @staticmethod
-    def close():
-        pass
+    def plugin(self):
+        return self.model.plugin.get(self.plugin_fk) if self.plugin_fk else None
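
MockContext's serialization_dict / instantiate_from_dict pair follows the protocol the process
executor expects for rebuilding a context in a child process: serialize to plain, picklable
constructor kwargs, then reconstruct from them. A self-contained sketch of that round trip
(class name hypothetical):

    class Reconstructible(object):
        def __init__(self, task_kwargs=None):
            self._task_kwargs = task_kwargs or {}

        @property
        def serialization_dict(self):
            # Everything needed to rebuild the object, as picklable kwargs.
            return {'context_cls': self.__class__,
                    'context': {'task_kwargs': self._task_kwargs}}

        @classmethod
        def instantiate_from_dict(cls, task_kwargs=None):
            return cls(task_kwargs=task_kwargs or {})

    info = Reconstructible({'function': 'some.function'}).serialization_dict
    clone = info['context_cls'].instantiate_from_dict(**info['context'])
    assert clone._task_kwargs == {'function': 'some.function'}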

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 3079c60..32a68e0 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -35,7 +35,7 @@ from aria.orchestrator.workflows.executor import (
 )
 
 import tests
-from . import MockTask
+from . import MockContext
 
 
 def _get_function(func):
@@ -44,11 +44,17 @@ def _get_function(func):
 
 def execute_and_assert(executor, storage=None):
     expected_value = 'value'
-    successful_task = MockTask(_get_function(mock_successful_task), storage=storage)
-    failing_task = MockTask(_get_function(mock_failing_task), storage=storage)
-    task_with_inputs = MockTask(_get_function(mock_task_with_input),
-                                arguments={'input': models.Argument.wrap('input', 'value')},
-                                storage=storage)
+    successful_task = MockContext(
+        storage, task_kwargs=dict(function=_get_function(mock_successful_task))
+    )
+    failing_task = MockContext(
+        storage, task_kwargs=dict(function=_get_function(mock_failing_task))
+    )
+    task_with_inputs = MockContext(
+        storage,
+        task_kwargs=dict(function=_get_function(mock_task_with_input),
+                         arguments={'input': models.Argument.wrap('input', 'value')})
+    )
 
     for task in [successful_task, failing_task, task_with_inputs]:
         executor.execute(task)
@@ -95,10 +101,10 @@ class MockException(Exception):
 
 @pytest.fixture
 def storage(tmpdir):
-    return aria.application_model_storage(
-        aria.storage.sql_mapi.SQLAlchemyModelAPI,
-        initiator_kwargs=dict(base_dir=str(tmpdir))
-    )
+    _storage = aria.application_model_storage(aria.storage.sql_mapi.SQLAlchemyModelAPI,
+                                              initiator_kwargs=dict(base_dir=str(tmpdir)))
+    yield _storage
+    tests.storage.release_sqlite_storage(_storage)
 
 
 @pytest.fixture(params=[

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 058190e..755b9be 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -28,15 +28,17 @@ import tests.resources
 from tests.fixtures import (  # pylint: disable=unused-import
     plugins_dir,
     plugin_manager,
-    fs_model as model
 )
-from . import MockTask
+from . import MockContext
 
 
 class TestProcessExecutor(object):
 
-    def test_plugin_execution(self, executor, mock_plugin, storage):
-        task = MockTask('mock_plugin1.operation', plugin=mock_plugin, storage=storage)
+    def test_plugin_execution(self, executor, mock_plugin, model):
+        ctx = MockContext(
+            model,
+            task_kwargs=dict(function='mock_plugin1.operation', plugin_fk=mock_plugin.id)
+        )
 
         queue = Queue.Queue()
 
@@ -46,7 +48,7 @@ class TestProcessExecutor(object):
         events.on_success_task_signal.connect(handler)
         events.on_failure_task_signal.connect(handler)
         try:
-            executor.execute(task)
+            executor.execute(ctx)
             error = queue.get(timeout=60)
             # tests/resources/plugins/mock-plugin1 is the plugin installed
             # during this tests setup. The module mock_plugin1 contains a single
@@ -63,10 +65,10 @@ class TestProcessExecutor(object):
             events.on_success_task_signal.disconnect(handler)
             events.on_failure_task_signal.disconnect(handler)
 
-    def test_closed(self, executor):
+    def test_closed(self, executor, model):
         executor.close()
         with pytest.raises(RuntimeError) as exc_info:
-            executor.execute(task=MockTask(function='some.function'))
+            executor.execute(MockContext(model, task_kwargs=dict(function='some.function')))
         assert 'closed' in exc_info.value.message
 
 
@@ -85,8 +87,8 @@ def mock_plugin(plugin_manager, tmpdir):
 
 
 @pytest.fixture
-def storage(tmpdir):
-    return aria.application_model_storage(
-        aria.storage.sql_mapi.SQLAlchemyModelAPI,
-        initiator_kwargs=dict(base_dir=str(tmpdir))
-    )
+def model(tmpdir):
+    _storage = aria.application_model_storage(aria.storage.sql_mapi.SQLAlchemyModelAPI,
+                                              initiator_kwargs=dict(base_dir=str(tmpdir)))
+    yield _storage
+    tests.storage.release_sqlite_storage(_storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 5f0b75f..ba98c4f 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -17,7 +17,7 @@ import pytest
 
 from aria import extension
 from aria.orchestrator.workflows import api
-from aria.orchestrator.workflows.core import engine
+from aria.orchestrator.workflows.core import engine, compile
 from aria.orchestrator.workflows.executor import process
 from aria.orchestrator import workflow, operation
 
@@ -57,8 +57,9 @@ def test_decorate_extension(context, executor):
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
-    eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
-    eng.execute()
+    compile.create_execution_tasks(context, graph, executor.__class__)
+    eng = engine.Engine({executor.__class__: executor})
+    eng.execute(context)
     out = get_node(context).attributes.get('out').value
     assert out['wrapper_arguments'] == arguments
     assert out['function_arguments'] == arguments

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index 7dbcc5a..2f1c325 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -18,7 +18,7 @@ import copy
 import pytest
 
 from aria.orchestrator.workflows import api
-from aria.orchestrator.workflows.core import engine
+from aria.orchestrator.workflows.core import engine, compile
 from aria.orchestrator.workflows.executor import process
 from aria.orchestrator import workflow, operation
 from aria.orchestrator.workflows import exceptions
@@ -107,8 +107,9 @@ def _run_workflow(context, executor, op_func, arguments=None):
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
-    eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
-    eng.execute()
+    compile.create_execution_tasks(context, graph, executor.__class__)
+    eng = engine.Engine({executor.__class__: executor})
+    eng.execute(context)
     out = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME).attributes.get('out')
     return out.value if out else None
 


[4/4] incubator-ariatosca git commit: ARIA-278 remove core tasks

Posted by mx...@apache.org.
ARIA-278 remove core tasks


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/1fee85c4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/1fee85c4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/1fee85c4

Branch: refs/heads/ARIA-278-Remove-core-tasks
Commit: 1fee85c4193c635d8598affbf769d306917760d8
Parents: 9907520
Author: max-orlov <ma...@gigaspaces.com>
Authored: Sun Jun 11 19:05:35 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue Jun 20 19:14:41 2017 +0300

----------------------------------------------------------------------
 aria/modeling/orchestration.py                  | 143 +++++++---
 aria/orchestrator/context/operation.py          |   7 +
 aria/orchestrator/context/workflow.py           |  21 ++
 aria/orchestrator/workflow_runner.py            |  22 +-
 aria/orchestrator/workflows/api/task.py         |   7 +
 aria/orchestrator/workflows/core/__init__.py    |   2 +-
 aria/orchestrator/workflows/core/compile.py     | 116 ++++++++
 aria/orchestrator/workflows/core/engine.py      | 121 +++++----
 .../workflows/core/events_handler.py            | 137 +++++-----
 aria/orchestrator/workflows/core/task.py        | 271 -------------------
 aria/orchestrator/workflows/core/translation.py | 109 --------
 aria/orchestrator/workflows/events_logging.py   |  25 +-
 aria/orchestrator/workflows/executor/base.py    |  31 ++-
 aria/orchestrator/workflows/executor/dry.py     |  57 ++--
 aria/orchestrator/workflows/executor/process.py |  18 +-
 aria/orchestrator/workflows/executor/thread.py  |  18 +-
 tests/__init__.py                               |   2 +
 tests/orchestrator/context/__init__.py          |   9 +-
 tests/orchestrator/context/test_operation.py    |  26 +-
 tests/orchestrator/context/test_serialize.py    |  10 +-
 .../orchestrator/execution_plugin/test_local.py |  10 +-
 tests/orchestrator/execution_plugin/test_ssh.py |  11 +-
 tests/orchestrator/test_workflow_runner.py      |  43 ++-
 .../orchestrator/workflows/core/test_engine.py  |  23 +-
 .../orchestrator/workflows/core/test_events.py  |  20 +-
 tests/orchestrator/workflows/core/test_task.py  |  31 +--
 .../test_task_graph_into_execution_graph.py     |  55 ++--
 .../orchestrator/workflows/executor/__init__.py |  89 +++---
 .../workflows/executor/test_executor.py         |  26 +-
 .../workflows/executor/test_process_executor.py |  26 +-
 .../executor/test_process_executor_extension.py |   7 +-
 .../test_process_executor_tracked_changes.py    |   7 +-
 32 files changed, 677 insertions(+), 823 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index 995c8c2..17d2476 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -21,7 +21,6 @@ classes:
 """
 
 # pylint: disable=no-self-argument, no-member, abstract-method
-
 from datetime import datetime
 
 from sqlalchemy import (
@@ -34,19 +33,19 @@ from sqlalchemy import (
     String,
     Float,
     orm,
-)
+    PickleType)
 from sqlalchemy.ext.associationproxy import association_proxy
 from sqlalchemy.ext.declarative import declared_attr
 
 from ..orchestrator.exceptions import (TaskAbortException, TaskRetryException)
-from .mixins import ModelMixin, ParameterMixin
+from . import mixins
 from . import (
     relationship,
     types as modeling_types
 )
 
 
-class ExecutionBase(ModelMixin):
+class ExecutionBase(mixins.ModelMixin):
     """
     Execution model representation.
     """
@@ -152,7 +151,7 @@ class ExecutionBase(ModelMixin):
         )
 
 
-class PluginBase(ModelMixin):
+class PluginBase(mixins.ModelMixin):
     """
     An installed plugin.
 
@@ -213,7 +212,7 @@ class PluginBase(ModelMixin):
     uploaded_at = Column(DateTime, nullable=False, index=True)
 
 
-class TaskBase(ModelMixin):
+class TaskBase(mixins.ModelMixin):
     """
     Represents the smallest unit of stateful execution in ARIA. The task state includes inputs,
     outputs, as well as an atomic status, ensuring that the task can only be running once at any
@@ -257,10 +256,24 @@ class TaskBase(ModelMixin):
 
     __tablename__ = 'task'
 
-    __private_fields__ = ['node_fk',
-                          'relationship_fk',
-                          'plugin_fk',
-                          'execution_fk']
+    __private_fields__ = ['dependency_operation_task_fk', 'dependency_stub_task_fk', 'node_fk',
+                          'relationship_fk', 'plugin_fk', 'execution_fk']
+
+    START_WORKFLOW = 'start_workflow'
+    END_WORKFLOW = 'end_workflow'
+    START_SUBWROFKLOW = 'start_subworkflow'
+    END_SUBWORKFLOW = 'end_subworkflow'
+    STUB = 'stub'
+    CONDITIONAL = 'conditional'
+
+    STUB_TYPES = (
+        START_WORKFLOW,
+        START_SUBWROFKLOW,
+        END_WORKFLOW,
+        END_SUBWORKFLOW,
+        STUB,
+        CONDITIONAL,
+    )
 
     PENDING = 'pending'
     RETRYING = 'retrying'
@@ -276,10 +289,28 @@ class TaskBase(ModelMixin):
         SUCCESS,
         FAILED,
     )
-
     INFINITE_RETRIES = -1
 
     @declared_attr
+    def execution(cls):
+        return relationship.many_to_one(cls, 'execution')
+
+    @declared_attr
+    def execution_fk(cls):
+        return relationship.foreign_key('execution', nullable=True)
+
+    status = Column(Enum(*STATES, name='status'), default=PENDING)
+    due_at = Column(DateTime, nullable=False, index=True, default=datetime.utcnow())
+    started_at = Column(DateTime, default=None)
+    ended_at = Column(DateTime, default=None)
+    attempts_count = Column(Integer, default=1)
+
+    _api_id = Column(String)
+    _executor = Column(PickleType)
+    _context_cls = Column(PickleType)
+    _stub_type = Column(Enum(*STUB_TYPES))
+
+    @declared_attr
     def logs(cls):
         return relationship.one_to_many(cls, 'log')
 
@@ -296,10 +327,6 @@ class TaskBase(ModelMixin):
         return relationship.many_to_one(cls, 'plugin')
 
     @declared_attr
-    def execution(cls):
-        return relationship.many_to_one(cls, 'execution')
-
-    @declared_attr
     def arguments(cls):
         return relationship.one_to_many(cls, 'argument', dict_key='name')
 
@@ -307,19 +334,8 @@ class TaskBase(ModelMixin):
     max_attempts = Column(Integer, default=1)
     retry_interval = Column(Float, default=0)
     ignore_failure = Column(Boolean, default=False)
-
-    # State
-    status = Column(Enum(*STATES, name='status'), default=PENDING)
-    due_at = Column(DateTime, nullable=False, index=True, default=datetime.utcnow())
-    started_at = Column(DateTime, default=None)
-    ended_at = Column(DateTime, default=None)
-    attempts_count = Column(Integer, default=1)
-
-    def has_ended(self):
-        return self.status in (self.SUCCESS, self.FAILED)
-
-    def is_waiting(self):
-        return self.status in (self.PENDING, self.RETRYING)
+    interface_name = Column(String)
+    operation_name = Column(String)
 
     @property
     def actor(self):
@@ -351,10 +367,6 @@ class TaskBase(ModelMixin):
     def plugin_fk(cls):
         return relationship.foreign_key('plugin', nullable=True)
 
-    @declared_attr
-    def execution_fk(cls):
-        return relationship.foreign_key('execution', nullable=True)
-
     # endregion
 
     # region association proxies
@@ -376,14 +388,6 @@ class TaskBase(ModelMixin):
 
     # endregion
 
-    @classmethod
-    def for_node(cls, actor, **kwargs):
-        return cls(node=actor, **kwargs)
-
-    @classmethod
-    def for_relationship(cls, actor, **kwargs):
-        return cls(relationship=actor, **kwargs)
-
     @staticmethod
     def abort(message=None):
         raise TaskAbortException(message)
@@ -392,8 +396,63 @@ class TaskBase(ModelMixin):
     def retry(message=None, retry_interval=None):
         raise TaskRetryException(message, retry_interval=retry_interval)
 
+    @declared_attr
+    def dependency_fk(self):
+        return relationship.foreign_key('task', nullable=True)
+
+    @declared_attr
+    def dependencies(cls):
+        # symmetric relationship causes funky graphs
+        return relationship.one_to_many_self(cls, 'dependency_fk')
+
+    def has_ended(self):
+        return self.status in (self.SUCCESS, self.FAILED)
+
+    def is_waiting(self):
+        if self._stub_type:
+            return not self.has_ended()
+        else:
+            return self.status in (self.PENDING, self.RETRYING)
+
+    @classmethod
+    def from_api_task(cls, api_task, executor, **kwargs):
+        instantiation_kwargs = {}
+
+        if hasattr(api_task.actor, 'outbound_relationships'):
+            instantiation_kwargs['node'] = api_task.actor
+        elif hasattr(api_task.actor, 'source_node'):
+            instantiation_kwargs['relationship'] = api_task.actor
+        else:
+            raise RuntimeError('No operation context could be created for {actor.model_cls}'
+                               .format(actor=api_task.actor))
+
+        instantiation_kwargs.update(
+            {
+                'name': api_task.name,
+                'status': cls.PENDING,
+                'max_attempts': api_task.max_attempts,
+                'retry_interval': api_task.retry_interval,
+                'ignore_failure': api_task.ignore_failure,
+                'execution': api_task._workflow_context.execution,
+                'interface_name': api_task.interface_name,
+                'operation_name': api_task.operation_name,
+
+                # Only non-stub tasks have these fields
+                'plugin': api_task.plugin,
+                'function': api_task.function,
+                'arguments': api_task.arguments,
+                '_api_id': api_task.id,
+                '_context_cls': api_task._context_cls,
+                '_executor': executor,
+            }
+        )
+
+        instantiation_kwargs.update(**kwargs)
+
+        return cls(**instantiation_kwargs)
+
 
-class LogBase(ModelMixin):
+class LogBase(mixins.ModelMixin):
 
     __tablename__ = 'log'
 
@@ -435,7 +494,7 @@ class LogBase(ModelMixin):
         return '{name}: {self.msg}'.format(name=name, self=self)
 
 
-class ArgumentBase(ParameterMixin):
+class ArgumentBase(mixins.ParameterMixin):
 
     __tablename__ = 'argument'
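
With stub types in the model, is_waiting above now treats any stub task (workflow start/end
markers and the like) as waiting until it has ended, while real operation tasks wait only in
PENDING or RETRYING. A condensed, storage-free restatement of that logic:

    # Condensed from TaskBase above (SQLAlchemy columns elided).
    PENDING, RETRYING, SUCCESS, FAILED = 'pending', 'retrying', 'success', 'failed'

    class Task(object):
        def __init__(self, status=PENDING, stub_type=None):
            self.status = status
            self._stub_type = stub_type

        def has_ended(self):
            return self.status in (SUCCESS, FAILED)

        def is_waiting(self):
            if self._stub_type:
                # Stub tasks wait until they have ended, whatever the status.
                return not self.has_ended()
            return self.status in (PENDING, RETRYING)

    assert Task(stub_type='start_workflow').is_waiting()
    assert not Task(status=SUCCESS, stub_type='start_workflow').is_waiting()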
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index efdc04d..d43b847 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -18,6 +18,7 @@ Workflow and operation contexts
 """
 
 import threading
+from contextlib import contextmanager
 
 import aria
 from aria.utils import file
@@ -106,6 +107,12 @@ class BaseOperationContext(common.BaseContext):
             self.model.log._session.remove()
             self.model.log._engine.dispose()
 
+    @property
+    @contextmanager
+    def persist_changes(self):
+        yield
+        self.model.task.update(self.task)
+
 
 class NodeOperationContext(BaseOperationContext):
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index 920b237..aa5a786 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -20,6 +20,8 @@ Workflow and operation contexts
 import threading
 from contextlib import contextmanager
 
+from networkx import DiGraph
+
 from .exceptions import ContextException
 from .common import BaseContext
 
@@ -41,6 +43,7 @@ class WorkflowContext(BaseContext):
         self._task_max_attempts = task_max_attempts
         self._task_retry_interval = task_retry_interval
         self._task_ignore_failure = task_ignore_failure
+        self._execution_graph = None
         self._register_logger()
 
     def __repr__(self):
@@ -92,6 +95,24 @@ class WorkflowContext(BaseContext):
             }
         )
 
+    @property
+    def _graph(self):
+        if self._execution_graph is None:
+            graph = DiGraph()
+            for task in self.execution.tasks:
+                for dependency in task.dependencies:
+                    graph.add_edge(dependency, task)
+
+            self._execution_graph = graph
+
+        return self._execution_graph
+
+    @property
+    @contextmanager
+    def persist_changes(self):
+        yield
+        self._model.execution.update(self.execution)
+
 
 class _CurrentContext(threading.local):
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 848c59b..9e6b3ad 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -24,7 +24,7 @@ from datetime import datetime
 from . import exceptions
 from .context.workflow import WorkflowContext
 from .workflows import builtin
-from .workflows.core.engine import Engine
+from .workflows.core import engine, compile
 from .workflows.executor.process import ProcessExecutor
 from ..modeling import models
 from ..modeling import utils as modeling_utils
@@ -70,7 +70,7 @@ class WorkflowRunner(object):
         execution = self._create_execution_model(inputs)
         self._execution_id = execution.id
 
-        workflow_context = WorkflowContext(
+        self._workflow_context = WorkflowContext(
             name=self.__class__.__name__,
             model_storage=self._model_storage,
             resource_storage=resource_storage,
@@ -80,15 +80,17 @@ class WorkflowRunner(object):
             task_max_attempts=task_max_attempts,
             task_retry_interval=task_retry_interval)
 
+        # Set a default executor if none was provided
+        executor = executor or ProcessExecutor(plugin_manager=plugin_manager)
+
         # transforming the execution inputs to dict, to pass them to the workflow function
         execution_inputs_dict = dict(inp.unwrapped for inp in self.execution.inputs.values())
-        self._tasks_graph = workflow_fn(ctx=workflow_context, **execution_inputs_dict)
 
-        executor = executor or ProcessExecutor(plugin_manager=plugin_manager)
-        self._engine = Engine(
-            executor=executor,
-            workflow_context=workflow_context,
-            tasks_graph=self._tasks_graph)
+        self._tasks_graph = workflow_fn(ctx=self._workflow_context, **execution_inputs_dict)
+        compile.create_execution_tasks(
+            self._workflow_context, self._tasks_graph, executor.__class__)
+
+        self._engine = engine.Engine(executors={executor.__class__: executor})
 
     @property
     def execution_id(self):
@@ -103,10 +105,10 @@ class WorkflowRunner(object):
         return self._model_storage.service.get(self._service_id)
 
     def execute(self):
-        self._engine.execute()
+        self._engine.execute(ctx=self._workflow_context)
 
     def cancel(self):
-        self._engine.cancel_execution()
+        self._engine.cancel_execution(ctx=self._workflow_context)
 
     def _create_execution_model(self, inputs):
         execution = models.Execution(

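The runner now compiles the task graph into task models up front and passes the workflow context at execution time, instead of handing the graph to the engine constructor. The resulting calling convention, assuming workflow_context, tasks_graph and executor are already set up as above:

    from aria.orchestrator.workflows.core import engine, compile

    compile.create_execution_tasks(workflow_context, tasks_graph, executor.__class__)
    eng = engine.Engine(executors={executor.__class__: executor})
    eng.execute(ctx=workflow_context)
    # cancellation also takes the context explicitly now:
    # eng.cancel_execution(ctx=workflow_context)
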
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index ca125a8..f7d2c66 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -140,6 +140,13 @@ class OperationTask(BaseTask):
         self.arguments = modeling_utils.merge_parameter_values(arguments,
                                                                operation.arguments,
                                                                model_cls=models.Argument)
+        if getattr(self.actor, 'outbound_relationships', None) is not None:
+            self._context_cls = context.operation.NodeOperationContext
+        elif getattr(self.actor, 'source_node', None) is not None:
+            self._context_cls = context.operation.RelationshipOperationContext
+        else:
+            raise exceptions.TaskCreationException('Could not locate valid context for '
+                                                   '{actor.__class__}'.format(actor=self.actor))
 
     def __repr__(self):
         return self.name

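The context class is now chosen by duck typing on the actor (nodes expose outbound_relationships, relationships expose source_node) rather than by isinstance checks against the model classes. A standalone sketch of that dispatch, with stand-in classes in place of the ARIA models:

    class Node(object):                  # stand-in: nodes have outbound_relationships
        outbound_relationships = ()

    class Relationship(object):          # stand-in: relationships have source_node
        source_node = object()

    def context_cls_for(actor):
        if getattr(actor, 'outbound_relationships', None) is not None:
            return 'NodeOperationContext'
        elif getattr(actor, 'source_node', None) is not None:
            return 'RelationshipOperationContext'
        raise TypeError('could not locate a valid context for {0!r}'.format(actor))

    assert context_cls_for(Node()) == 'NodeOperationContext'
    assert context_cls_for(Relationship()) == 'RelationshipOperationContext'
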
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/core/__init__.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/__init__.py b/aria/orchestrator/workflows/core/__init__.py
index e377153..81db43f 100644
--- a/aria/orchestrator/workflows/core/__init__.py
+++ b/aria/orchestrator/workflows/core/__init__.py
@@ -17,4 +17,4 @@
 Core for the workflow execution mechanism
 """
 
-from . import task, translation, engine
+from . import engine

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/core/compile.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/compile.py b/aria/orchestrator/workflows/core/compile.py
new file mode 100644
index 0000000..932268a
--- /dev/null
+++ b/aria/orchestrator/workflows/core/compile.py
@@ -0,0 +1,116 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from ....modeling import models
+from .. import executor, api
+
+
+def create_execution_tasks(ctx, task_graph, default_executor):
+    execution = ctx.execution
+    _construct_execution_tasks(execution, task_graph, default_executor)
+    ctx.model.execution.update(execution)
+    return execution.tasks
+
+
+def _construct_execution_tasks(execution,
+                               task_graph,
+                               default_executor,
+                               stub_executor=executor.base.StubTaskExecutor,
+                               start_stub_type=models.Task.START_WORKFLOW,
+                               end_stub_type=models.Task.END_WORKFLOW,
+                               depends_on=()):
+    """
+    Translates the user graph into execution task models
+    :param task_graph: The user's graph
+    :param start_stub_type: internal use
+    :param end_stub_type: internal use
+    :param depends_on: internal use
+    """
+    depends_on = list(depends_on)
+
+    # Insert start marker
+    start_task = models.Task(execution=execution,
+                             dependencies=depends_on,
+                             _api_id=_start_graph_suffix(task_graph.id),
+                             _stub_type=start_stub_type,
+                             _executor=stub_executor)
+
+    for task in task_graph.topological_order(reverse=True):
+        operation_dependencies = _get_tasks_from_dependencies(
+            execution, task_graph.get_dependencies(task), [start_task])
+
+        if isinstance(task, api.task.OperationTask):
+            models.Task.from_api_task(api_task=task,
+                                      executor=default_executor,
+                                      dependencies=operation_dependencies)
+
+        elif isinstance(task, api.task.WorkflowTask):
+            # Build the graph recursively while adding start and end markers
+            _construct_execution_tasks(
+                execution=execution,
+                task_graph=task,
+                default_executor=default_executor,
+                stub_executor=stub_executor,
+                start_stub_type=models.Task.START_SUBWORKFLOW,
+                end_stub_type=models.Task.END_SUBWORKFLOW,
+                depends_on=operation_dependencies
+            )
+        elif isinstance(task, api.task.StubTask):
+            models.Task(execution=execution,
+                        dependencies=operation_dependencies,
+                        _api_id=task.id,
+                        _executor=stub_executor,
+                        _stub_type=models.Task.STUB,
+                        )
+        else:
+            raise RuntimeError('Undefined state')
+
+    # Insert end marker
+    models.Task(dependencies=_get_non_dependent_tasks(execution) or [start_task],
+                execution=execution,
+                _api_id=_end_graph_suffix(task_graph.id),
+                _executor=stub_executor,
+                _stub_type=end_stub_type)
+
+
+def _start_graph_suffix(api_id):
+    return '{0}-Start'.format(api_id)
+
+
+def _end_graph_suffix(api_id):
+    return '{0}-End'.format(api_id)
+
+
+def _get_non_dependent_tasks(execution):
+    tasks_with_dependencies = set()
+    for task in execution.tasks:
+        tasks_with_dependencies.update(task.dependencies)
+    return list(set(execution.tasks) - set(tasks_with_dependencies))
+
+
+def _get_tasks_from_dependencies(execution, dependencies, default=()):
+    """
+    Returns task list from dependencies.
+    """
+    tasks = []
+    for dependency in dependencies:
+        if getattr(dependency, 'actor', False):
+            # This is an operation task (it has an actor); use its own API id
+            dependency_name = dependency.id
+        else:
+            dependency_name = _end_graph_suffix(dependency.id)
+        tasks.extend(task for task in execution.tasks if task._api_id == dependency_name)
+    return tasks or default

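For orientation: compiling a simple two-step workflow with api_id 'wf' yields the chain wf-Start -> t1 -> t2 -> wf-End, where the end marker depends on every task that nothing else depends on. A sketch of the _get_non_dependent_tasks idea over plain objects:

    class T(object):                     # stand-in for models.Task
        def __init__(self, name, dependencies=()):
            self.name = name
            self.dependencies = list(dependencies)

    start = T('wf-Start')
    t1 = T('t1', dependencies=[start])
    t2 = T('t2', dependencies=[t1])
    tasks = [start, t1, t2]

    def non_dependent(tasks):
        # tasks that no other task depends on feed the end marker
        depended_on = set()
        for task in tasks:
            depended_on.update(task.dependencies)
        return [task for task in tasks if task not in depended_on]

    assert [t.name for t in non_dependent(tasks)] == ['t2']
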
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index 3a96804..9f0ddd7 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -20,15 +20,13 @@ The workflow engine. Executes workflows
 import time
 from datetime import datetime
 
-import networkx
-
 from aria import logger
 from aria.modeling import models
 from aria.orchestrator import events
+from aria.orchestrator.context import operation
 
 from .. import exceptions
-from . import task as engine_task
-from . import translation
+from ..executor.base import StubTaskExecutor
 # Import required so all signals are registered
 from . import events_handler  # pylint: disable=unused-import
 
@@ -38,84 +36,105 @@ class Engine(logger.LoggerMixin):
     The workflow engine. Executes workflows
     """
 
-    def __init__(self, executor, workflow_context, tasks_graph, **kwargs):
+    def __init__(self, executors, **kwargs):
         super(Engine, self).__init__(**kwargs)
-        self._workflow_context = workflow_context
-        self._execution_graph = networkx.DiGraph()
-        translation.build_execution_graph(task_graph=tasks_graph,
-                                          execution_graph=self._execution_graph,
-                                          default_executor=executor)
+        self._executors = executors.copy()
+        self._executors.setdefault(StubTaskExecutor, StubTaskExecutor())
 
-    def execute(self):
+    def execute(self, ctx):
         """
         execute the workflow
         """
+        executing_tasks = []
         try:
-            events.start_workflow_signal.send(self._workflow_context)
+            events.start_workflow_signal.send(ctx)
             while True:
-                cancel = self._is_cancel()
+                cancel = self._is_cancel(ctx)
                 if cancel:
                     break
-                for task in self._ended_tasks():
-                    self._handle_ended_tasks(task)
-                for task in self._executable_tasks():
-                    self._handle_executable_task(task)
-                if self._all_tasks_consumed():
+                for task in self._ended_tasks(ctx, executing_tasks):
+                    self._handle_ended_tasks(ctx, task, executing_tasks)
+                for task in self._executable_tasks(ctx):
+                    self._handle_executable_task(ctx, task, executing_tasks)
+                if self._all_tasks_consumed(ctx):
                     break
                 else:
                     time.sleep(0.1)
             if cancel:
-                events.on_cancelled_workflow_signal.send(self._workflow_context)
+                events.on_cancelled_workflow_signal.send(ctx)
             else:
-                events.on_success_workflow_signal.send(self._workflow_context)
+                events.on_success_workflow_signal.send(ctx)
         except BaseException as e:
-            events.on_failure_workflow_signal.send(self._workflow_context, exception=e)
+            events.on_failure_workflow_signal.send(ctx, exception=e)
             raise
 
-    def cancel_execution(self):
+    @staticmethod
+    def cancel_execution(ctx):
         """
         Send a cancel request to the engine. If execution already started, execution status
         will be modified to 'cancelling' status. If execution is in pending mode, execution status
         will be modified to 'cancelled' directly.
         """
-        events.on_cancelling_workflow_signal.send(self._workflow_context)
+        events.on_cancelling_workflow_signal.send(ctx)
 
-    def _is_cancel(self):
-        return self._workflow_context.execution.status in (models.Execution.CANCELLING,
-                                                           models.Execution.CANCELLED)
+    @staticmethod
+    def _is_cancel(ctx):
+        execution = ctx.model.execution.refresh(ctx.execution)
+        return execution.status in (models.Execution.CANCELLING, models.Execution.CANCELLED)
 
-    def _executable_tasks(self):
+    def _executable_tasks(self, ctx):
         now = datetime.utcnow()
-        return (task for task in self._tasks_iter()
-                if task.is_waiting() and
-                task.due_at <= now and
-                not self._task_has_dependencies(task))
-
-    def _ended_tasks(self):
-        return (task for task in self._tasks_iter() if task.has_ended())
+        return (
+            task for task in self._tasks_iter(ctx)
+            if task.is_waiting() and task.due_at <= now and
+            not self._task_has_dependencies(ctx, task)
+        )
 
-    def _task_has_dependencies(self, task):
-        return len(self._execution_graph.pred.get(task.id, {})) > 0
+    @staticmethod
+    def _ended_tasks(ctx, executing_tasks):
+        for task in executing_tasks:
+            if task.has_ended() and task in ctx._graph:
+                yield task
 
-    def _all_tasks_consumed(self):
-        return len(self._execution_graph.node) == 0
+    @staticmethod
+    def _task_has_dependencies(ctx, task):
+        return len(ctx._graph.pred.get(task, [])) > 0
 
-    def _tasks_iter(self):
-        for _, data in self._execution_graph.nodes_iter(data=True):
-            task = data['task']
-            if isinstance(task, engine_task.OperationTask):
-                if not task.model_task.has_ended():
-                    self._workflow_context.model.task.refresh(task.model_task)
-            yield task
+    @staticmethod
+    def _all_tasks_consumed(ctx):
+        return len(ctx._graph.node) == 0
 
     @staticmethod
-    def _handle_executable_task(task):
-        if isinstance(task, engine_task.OperationTask):
-            events.sent_task_signal.send(task)
-        task.execute()
+    def _tasks_iter(ctx):
+        for task in ctx.execution.tasks:
+            yield ctx.model.task.refresh(task)
+
+    def _handle_executable_task(self, ctx, task, executing_tasks):
+        task_executor = self._executors[task._executor]
+
+        # Stub tasks get the base operation context; other tasks carry their own context class
+        context_cls = operation.BaseOperationContext if task._stub_type else task._context_cls
+        op_ctx = context_cls(
+            model_storage=ctx.model,
+            resource_storage=ctx.resource,
+            workdir=ctx._workdir,
+            task_id=task.id,
+            actor_id=task.actor.id if task.actor else None,
+            service_id=task.execution.service.id,
+            execution_id=task.execution.id,
+            name=task.name
+        )
+
+        executing_tasks.append(task)
+
+        if not task._stub_type:
+            events.sent_task_signal.send(op_ctx)
+        task_executor.execute(op_ctx)
 
-    def _handle_ended_tasks(self, task):
+    @staticmethod
+    def _handle_ended_tasks(ctx, task, executing_tasks):
+        executing_tasks.remove(task)
         if task.status == models.Task.FAILED and not task.ignore_failure:
             raise exceptions.ExecutorException('Workflow failed')
         else:
-            self._execution_graph.remove_node(task.id)
+            ctx._graph.remove_node(task)

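The engine is now stateless between runs: per execute() call it tracks only an executing_tasks list, and a finished task is dropped from both that list and the context's graph. A toy, self-contained version of that bookkeeping loop (no storage; tasks here "end" as soon as they start):

    import time

    class Toy(object):                   # stand-in task that ends once started
        def __init__(self, name):
            self.name = name
            self.started = False

        def has_ended(self):
            return self.started

    waiting = [Toy('t1'), Toy('t2')]
    executing = []

    while waiting or executing:
        for task in [t for t in executing if t.has_ended()]:
            executing.remove(task)       # cf. _handle_ended_tasks
        for task in list(waiting):
            waiting.remove(task)
            task.started = True
            executing.append(task)       # cf. _handle_executable_task
        time.sleep(0.1)
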
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/core/events_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/events_handler.py b/aria/orchestrator/workflows/core/events_handler.py
index 669fb43..2d71d2a 100644
--- a/aria/orchestrator/workflows/core/events_handler.py
+++ b/aria/orchestrator/workflows/core/events_handler.py
@@ -30,120 +30,121 @@ from ... import exceptions
 
 
 @events.sent_task_signal.connect
-def _task_sent(task, *args, **kwargs):
-    with task._update():
-        task.status = task.SENT
+def _task_sent(ctx, *args, **kwargs):
+    with ctx.persist_changes:
+        ctx.task.status = ctx.task.SENT
 
 
 @events.start_task_signal.connect
-def _task_started(task, *args, **kwargs):
-    with task._update():
-        task.started_at = datetime.utcnow()
-        task.status = task.STARTED
-    _update_node_state_if_necessary(task, is_transitional=True)
+def _task_started(ctx, *args, **kwargs):
+    with ctx.persist_changes:
+        ctx.task.started_at = datetime.utcnow()
+        ctx.task.status = ctx.task.STARTED
+        _update_node_state_if_necessary(ctx, is_transitional=True)
 
 
 @events.on_failure_task_signal.connect
-def _task_failed(task, exception, *args, **kwargs):
-    with task._update():
+def _task_failed(ctx, exception, *args, **kwargs):
+    with ctx.persist_changes:
         should_retry = all([
             not isinstance(exception, exceptions.TaskAbortException),
-            task.attempts_count < task.max_attempts or task.max_attempts == task.INFINITE_RETRIES,
-            # ignore_failure check here means the task will not be retries and it will be marked
+            ctx.task.attempts_count < ctx.task.max_attempts or
+            ctx.task.max_attempts == ctx.task.INFINITE_RETRIES,
+            # ignore_failure check here means the task will not be retried and it will be marked
             # as failed. The engine will also look at ignore_failure so it won't fail the
             # workflow.
-            not task.ignore_failure
+            not ctx.task.ignore_failure
         ])
         if should_retry:
             retry_interval = None
             if isinstance(exception, exceptions.TaskRetryException):
                 retry_interval = exception.retry_interval
             if retry_interval is None:
-                retry_interval = task.retry_interval
-            task.status = task.RETRYING
-            task.attempts_count += 1
-            task.due_at = datetime.utcnow() + timedelta(seconds=retry_interval)
+                retry_interval = ctx.task.retry_interval
+            ctx.task.status = ctx.task.RETRYING
+            ctx.task.attempts_count += 1
+            ctx.task.due_at = datetime.utcnow() + timedelta(seconds=retry_interval)
         else:
-            task.ended_at = datetime.utcnow()
-            task.status = task.FAILED
+            ctx.task.ended_at = datetime.utcnow()
+            ctx.task.status = ctx.task.FAILED
 
 
 @events.on_success_task_signal.connect
-def _task_succeeded(task, *args, **kwargs):
-    with task._update():
-        task.ended_at = datetime.utcnow()
-        task.status = task.SUCCESS
+def _task_succeeded(ctx, *args, **kwargs):
+    with ctx.persist_changes:
+        ctx.task.ended_at = datetime.utcnow()
+        ctx.task.status = ctx.task.SUCCESS
 
-    _update_node_state_if_necessary(task)
+        _update_node_state_if_necessary(ctx)
 
 
 @events.start_workflow_signal.connect
 def _workflow_started(workflow_context, *args, **kwargs):
-    execution = workflow_context.execution
-    # the execution may already be in the process of cancelling
-    if execution.status in (execution.CANCELLING, execution.CANCELLED):
-        return
-    execution.status = execution.STARTED
-    execution.started_at = datetime.utcnow()
-    workflow_context.execution = execution
+    with workflow_context.persist_changes:
+        execution = workflow_context.execution
+        # the execution may already be in the process of cancelling
+        if execution.status in (execution.CANCELLING, execution.CANCELLED):
+            return
+        execution.status = execution.STARTED
+        execution.started_at = datetime.utcnow()
 
 
 @events.on_failure_workflow_signal.connect
 def _workflow_failed(workflow_context, exception, *args, **kwargs):
-    execution = workflow_context.execution
-    execution.error = str(exception)
-    execution.status = execution.FAILED
-    execution.ended_at = datetime.utcnow()
-    workflow_context.execution = execution
+    with workflow_context.persist_changes:
+        execution = workflow_context.execution
+        execution.error = str(exception)
+        execution.status = execution.FAILED
+        execution.ended_at = datetime.utcnow()
 
 
 @events.on_success_workflow_signal.connect
 def _workflow_succeeded(workflow_context, *args, **kwargs):
-    execution = workflow_context.execution
-    execution.status = execution.SUCCEEDED
-    execution.ended_at = datetime.utcnow()
-    workflow_context.execution = execution
+    with workflow_context.persist_changes:
+        execution = workflow_context.execution
+        execution.status = execution.SUCCEEDED
+        execution.ended_at = datetime.utcnow()
 
 
 @events.on_cancelled_workflow_signal.connect
 def _workflow_cancelled(workflow_context, *args, **kwargs):
-    execution = workflow_context.execution
-    # _workflow_cancelling function may have called this function already
-    if execution.status == execution.CANCELLED:
-        return
-    # the execution may have already been finished
-    elif execution.status in (execution.SUCCEEDED, execution.FAILED):
-        _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, execution.status)
-    else:
-        execution.status = execution.CANCELLED
-        execution.ended_at = datetime.utcnow()
-        workflow_context.execution = execution
+    with workflow_context.persist_changes:
+        execution = workflow_context.execution
+        # _workflow_cancelling function may have called this function already
+        if execution.status == execution.CANCELLED:
+            return
+        # the execution may have already been finished
+        elif execution.status in (execution.SUCCEEDED, execution.FAILED):
+            _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, execution.status)
+        else:
+            execution.status = execution.CANCELLED
+            execution.ended_at = datetime.utcnow()
 
 
 @events.on_cancelling_workflow_signal.connect
 def _workflow_cancelling(workflow_context, *args, **kwargs):
-    execution = workflow_context.execution
-    if execution.status == execution.PENDING:
-        return _workflow_cancelled(workflow_context=workflow_context)
-    # the execution may have already been finished
-    elif execution.status in (execution.SUCCEEDED, execution.FAILED):
-        _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, execution.status)
-    else:
-        execution.status = execution.CANCELLING
-        workflow_context.execution = execution
-
-
-def _update_node_state_if_necessary(task, is_transitional=False):
+    with workflow_context.persist_changes:
+        execution = workflow_context.execution
+        if execution.status == execution.PENDING:
+            return _workflow_cancelled(workflow_context=workflow_context)
+        # the execution may have already been finished
+        elif execution.status in (execution.SUCCEEDED, execution.FAILED):
+            _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, execution.status)
+        else:
+            execution.status = execution.CANCELLING
+
+
+def _update_node_state_if_necessary(ctx, is_transitional=False):
     # TODO: this is not the right way to check! the interface name is arbitrary
     # and also will *never* be the type name
-    model_task = task.model_task
-    node = model_task.node if model_task is not None else None
+    node = ctx.task.node if ctx.task is not None else None
     if (node is not None) and \
-        (task.interface_name in ('Standard', 'tosca.interfaces.node.lifecycle.Standard')):
-        state = node.determine_state(op_name=task.operation_name, is_transitional=is_transitional)
+        (ctx.task.interface_name in ('Standard', 'tosca.interfaces.node.lifecycle.Standard')):
+        state = node.determine_state(op_name=ctx.task.operation_name,
+                                     is_transitional=is_transitional)
         if state:
             node.state = state
-            task.context.model.node.update(node)
+            ctx.model.node.update(node)
 
 
 def _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, status):

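All of the handlers above are connected to signals (ARIA's events module is built on blinker-style signals), and after this change they receive the context as the sender instead of a core task object. A minimal sketch of the connect/send flow using blinker directly; the signal and Ctx class here are illustrative, not ARIA's own objects:

    from blinker import signal

    on_success_task_signal = signal('on_success_task_signal')

    @on_success_task_signal.connect
    def _task_succeeded(ctx, *args, **kwargs):
        print('task %s succeeded' % ctx.task_id)

    class Ctx(object):                   # stand-in for the operation context
        task_id = 42

    on_success_task_signal.send(Ctx())   # prints: task 42 succeeded
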
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
deleted file mode 100644
index d732f09..0000000
--- a/aria/orchestrator/workflows/core/task.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow tasks
-"""
-
-from contextlib import contextmanager
-from datetime import datetime
-from functools import (
-    partial,
-    wraps,
-)
-
-
-from ....modeling import models
-from ...context import operation as operation_context
-from .. import exceptions
-
-
-def _locked(func=None):
-    if func is None:
-        return partial(_locked, func=_locked)
-
-    @wraps(func)
-    def _wrapper(self, value, **kwargs):
-        if self._update_fields is None:
-            raise exceptions.TaskException('Task is not in update mode')
-        return func(self, value, **kwargs)
-    return _wrapper
-
-
-class BaseTask(object):
-    """
-    Base class for Task objects
-    """
-
-    def __init__(self, id, executor, *args, **kwargs):
-        super(BaseTask, self).__init__(*args, **kwargs)
-        self._id = id
-        self._executor = executor
-
-    def execute(self):
-        return self._executor.execute(self)
-
-    @property
-    def id(self):
-        """
-        :return: the task's id
-        """
-        return self._id
-
-
-class StubTask(BaseTask):
-    """
-    Base stub task for marker user tasks that only mark the start/end of a workflow
-    or sub-workflow
-    """
-    STARTED = models.Task.STARTED
-    SUCCESS = models.Task.SUCCESS
-
-    def __init__(self, *args, **kwargs):
-        super(StubTask, self).__init__(*args, **kwargs)
-        self.status = models.Task.PENDING
-        self.due_at = datetime.utcnow()
-
-    def has_ended(self):
-        return self.status == self.SUCCESS
-
-    def is_waiting(self):
-        return not self.has_ended()
-
-
-class StartWorkflowTask(StubTask):
-    """
-    Task marking a workflow start
-    """
-    pass
-
-
-class EndWorkflowTask(StubTask):
-    """
-    Task marking a workflow end
-    """
-    pass
-
-
-class StartSubWorkflowTask(StubTask):
-    """
-    Task marking a subworkflow start
-    """
-    pass
-
-
-class EndSubWorkflowTask(StubTask):
-    """
-    Task marking a subworkflow end
-    """
-    pass
-
-
-class OperationTask(BaseTask):
-    """
-    Operation task
-    """
-    def __init__(self, api_task, *args, **kwargs):
-        # If no executor is provided, we infer that this is an empty task which does not need to be
-        # executed.
-        super(OperationTask, self).__init__(id=api_task.id, *args, **kwargs)
-        self._workflow_context = api_task._workflow_context
-        self.interface_name = api_task.interface_name
-        self.operation_name = api_task.operation_name
-        model_storage = api_task._workflow_context.model
-
-        actor = getattr(api_task.actor, '_wrapped', api_task.actor)
-
-        base_task_model = model_storage.task.model_cls
-        if isinstance(actor, models.Node):
-            context_cls = operation_context.NodeOperationContext
-            create_task_model = base_task_model.for_node
-        elif isinstance(actor, models.Relationship):
-            context_cls = operation_context.RelationshipOperationContext
-            create_task_model = base_task_model.for_relationship
-        else:
-            raise RuntimeError('No operation context could be created for {actor.model_cls}'
-                               .format(actor=actor))
-
-        task_model = create_task_model(
-            name=api_task.name,
-            actor=actor,
-            status=base_task_model.PENDING,
-            max_attempts=api_task.max_attempts,
-            retry_interval=api_task.retry_interval,
-            ignore_failure=api_task.ignore_failure,
-            execution=self._workflow_context.execution,
-
-            # Only non-stub tasks have these fields
-            plugin=api_task.plugin,
-            function=api_task.function,
-            arguments=api_task.arguments
-        )
-        self._workflow_context.model.task.put(task_model)
-
-        self._ctx = context_cls(name=api_task.name,
-                                model_storage=self._workflow_context.model,
-                                resource_storage=self._workflow_context.resource,
-                                service_id=self._workflow_context._service_id,
-                                task_id=task_model.id,
-                                actor_id=actor.id,
-                                execution_id=self._workflow_context._execution_id,
-                                workdir=self._workflow_context._workdir)
-        self._task_id = task_model.id
-        self._update_fields = None
-
-    @contextmanager
-    def _update(self):
-        """
-        A context manager which puts the task into update mode, enabling fields update.
-        :yields: None
-        """
-        self._update_fields = {}
-        try:
-            yield
-            for key, value in self._update_fields.items():
-                setattr(self.model_task, key, value)
-            self.model_task = self.model_task
-        finally:
-            self._update_fields = None
-
-    @property
-    def model_task(self):
-        """
-        Returns the task model in storage
-        :return: task in storage
-        """
-        return self._workflow_context.model.task.get(self._task_id)
-
-    @model_task.setter
-    def model_task(self, value):
-        self._workflow_context.model.task.put(value)
-
-    @property
-    def context(self):
-        """
-        Contexts for the operation
-        :return:
-        """
-        return self._ctx
-
-    @property
-    def status(self):
-        """
-        Returns the task status
-        :return: task status
-        """
-        return self.model_task.status
-
-    @status.setter
-    @_locked
-    def status(self, value):
-        self._update_fields['status'] = value
-
-    @property
-    def started_at(self):
-        """
-        Returns when the task started
-        :return: when task started
-        """
-        return self.model_task.started_at
-
-    @started_at.setter
-    @_locked
-    def started_at(self, value):
-        self._update_fields['started_at'] = value
-
-    @property
-    def ended_at(self):
-        """
-        Returns when the task ended
-        :return: when task ended
-        """
-        return self.model_task.ended_at
-
-    @ended_at.setter
-    @_locked
-    def ended_at(self, value):
-        self._update_fields['ended_at'] = value
-
-    @property
-    def attempts_count(self):
-        """
-        Returns the attempts count for the task
-        :return: attempts count
-        """
-        return self.model_task.attempts_count
-
-    @attempts_count.setter
-    @_locked
-    def attempts_count(self, value):
-        self._update_fields['attempts_count'] = value
-
-    @property
-    def due_at(self):
-        """
-        Returns the minimum datetime in which the task can be executed
-        :return: eta
-        """
-        return self.model_task.due_at
-
-    @due_at.setter
-    @_locked
-    def due_at(self, value):
-        self._update_fields['due_at'] = value
-
-    def __getattr__(self, attr):
-        try:
-            return getattr(self.model_task, attr)
-        except AttributeError:
-            return super(OperationTask, self).__getattribute__(attr)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/core/translation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/translation.py b/aria/orchestrator/workflows/core/translation.py
deleted file mode 100644
index fec108b..0000000
--- a/aria/orchestrator/workflows/core/translation.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Translation of user graph's API to the execution graph
-"""
-
-from .. import api
-from ..executor import base
-from . import task as core_task
-
-
-def build_execution_graph(
-        task_graph,
-        execution_graph,
-        default_executor,
-        start_cls=core_task.StartWorkflowTask,
-        end_cls=core_task.EndWorkflowTask,
-        depends_on=()):
-    """
-    Translates the user graph to the execution graph
-    :param task_graph: The user's graph
-    :param workflow_context: The workflow
-    :param execution_graph: The execution graph that is being built
-    :param start_cls: internal use
-    :param end_cls: internal use
-    :param depends_on: internal use
-    """
-    # Insert start marker
-    start_task = start_cls(id=_start_graph_suffix(task_graph.id), executor=base.StubTaskExecutor())
-    _add_task_and_dependencies(execution_graph, start_task, depends_on)
-
-    for api_task in task_graph.topological_order(reverse=True):
-        dependencies = task_graph.get_dependencies(api_task)
-        operation_dependencies = _get_tasks_from_dependencies(
-            execution_graph, dependencies, default=[start_task])
-
-        if isinstance(api_task, api.task.OperationTask):
-            operation_task = core_task.OperationTask(api_task, executor=default_executor)
-            _add_task_and_dependencies(execution_graph, operation_task, operation_dependencies)
-        elif isinstance(api_task, api.task.WorkflowTask):
-            # Build the graph recursively while adding start and end markers
-            build_execution_graph(
-                task_graph=api_task,
-                execution_graph=execution_graph,
-                default_executor=default_executor,
-                start_cls=core_task.StartSubWorkflowTask,
-                end_cls=core_task.EndSubWorkflowTask,
-                depends_on=operation_dependencies
-            )
-        elif isinstance(api_task, api.task.StubTask):
-            stub_task = core_task.StubTask(id=api_task.id, executor=base.StubTaskExecutor())
-            _add_task_and_dependencies(execution_graph, stub_task, operation_dependencies)
-        else:
-            raise RuntimeError('Undefined state')
-
-    # Insert end marker
-    workflow_dependencies = _get_tasks_from_dependencies(
-        execution_graph,
-        _get_non_dependency_tasks(task_graph),
-        default=[start_task])
-    end_task = end_cls(id=_end_graph_suffix(task_graph.id), executor=base.StubTaskExecutor())
-    _add_task_and_dependencies(execution_graph, end_task, workflow_dependencies)
-
-
-def _add_task_and_dependencies(execution_graph, operation_task, operation_dependencies=()):
-    execution_graph.add_node(operation_task.id, task=operation_task)
-    for dependency in operation_dependencies:
-        execution_graph.add_edge(dependency.id, operation_task.id)
-
-
-def _get_tasks_from_dependencies(execution_graph, dependencies, default=()):
-    """
-    Returns task list from dependencies.
-    """
-    tasks = []
-    for dependency in dependencies:
-        if isinstance(dependency, (api.task.OperationTask, api.task.StubTask)):
-            dependency_id = dependency.id
-        else:
-            dependency_id = _end_graph_suffix(dependency.id)
-        tasks.append(execution_graph.node[dependency_id]['task'])
-    return tasks or default
-
-
-def _start_graph_suffix(id):
-    return '{0}-Start'.format(id)
-
-
-def _end_graph_suffix(id):
-    return '{0}-End'.format(id)
-
-
-def _get_non_dependency_tasks(graph):
-    for task in graph.tasks:
-        if len(list(graph.get_dependents(task))) == 0:
-            yield task

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/events_logging.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/events_logging.py b/aria/orchestrator/workflows/events_logging.py
index 036c1f7..4cee867 100644
--- a/aria/orchestrator/workflows/events_logging.py
+++ b/aria/orchestrator/workflows/events_logging.py
@@ -34,31 +34,32 @@ def _get_task_name(task):
 
 
 @events.start_task_signal.connect
-def _start_task_handler(task, **kwargs):
+def _start_task_handler(ctx, **kwargs):
     # If the task has no function this is an empty task.
-    if task.function:
+    if ctx.task.function:
         suffix = 'started...'
-        logger = task.context.logger.info
+        logger = ctx.logger.info
     else:
         suffix = 'has no implementation'
-        logger = task.context.logger.debug
+        logger = ctx.logger.debug
 
     logger('{name} {task.interface_name}.{task.operation_name} {suffix}'.format(
-        name=_get_task_name(task), task=task, suffix=suffix))
+        name=_get_task_name(ctx.task), task=ctx.task, suffix=suffix))
+
 
 @events.on_success_task_signal.connect
-def _success_task_handler(task, **kwargs):
-    if not task.function:
+def _success_task_handler(ctx, **kwargs):
+    if not ctx.task.function:
         return
-    task.context.logger.info('{name} {task.interface_name}.{task.operation_name} successful'
-                             .format(name=_get_task_name(task), task=task))
+    ctx.logger.info('{name} {task.interface_name}.{task.operation_name} successful'
+                    .format(name=_get_task_name(ctx.task), task=ctx.task))
 
 
 @events.on_failure_task_signal.connect
-def _failure_operation_handler(task, traceback, **kwargs):
-    task.context.logger.error(
+def _failure_operation_handler(ctx, traceback, **kwargs):
+    ctx.logger.error(
         '{name} {task.interface_name}.{task.operation_name} failed'
-        .format(name=_get_task_name(task), task=task), extra=dict(traceback=traceback)
+        .format(name=_get_task_name(ctx.task), task=ctx.task), extra=dict(traceback=traceback)
     )
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/executor/base.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/base.py b/aria/orchestrator/workflows/executor/base.py
index 7fece6f..6a3c9d2 100644
--- a/aria/orchestrator/workflows/executor/base.py
+++ b/aria/orchestrator/workflows/executor/base.py
@@ -28,19 +28,19 @@ class BaseExecutor(logger.LoggerMixin):
     def _execute(self, task):
         raise NotImplementedError
 
-    def execute(self, task):
+    def execute(self, ctx):
         """
         Execute a task
         :param task: task to execute
         """
-        if task.function:
-            self._execute(task)
+        if ctx.task.function:
+            self._execute(ctx)
         else:
             # In this case the task is missing a function. This task still gets to an
-            # executor, but since there is nothing to run, we by default simply skip the execution
-            # itself.
-            self._task_started(task)
-            self._task_succeeded(task)
+            # executor, but since there is nothing to run, we by default simply skip the
+            # execution itself.
+            self._task_started(ctx)
+            self._task_succeeded(ctx)
 
     def close(self):
         """
@@ -49,18 +49,19 @@ class BaseExecutor(logger.LoggerMixin):
         pass
 
     @staticmethod
-    def _task_started(task):
-        events.start_task_signal.send(task)
+    def _task_started(ctx):
+        events.start_task_signal.send(ctx)
 
     @staticmethod
-    def _task_failed(task, exception, traceback=None):
-        events.on_failure_task_signal.send(task, exception=exception, traceback=traceback)
+    def _task_failed(ctx, exception, traceback=None):
+        events.on_failure_task_signal.send(ctx, exception=exception, traceback=traceback)
 
     @staticmethod
-    def _task_succeeded(task):
-        events.on_success_task_signal.send(task)
+    def _task_succeeded(ctx):
+        events.on_success_task_signal.send(ctx)
 
 
 class StubTaskExecutor(BaseExecutor):                                                               # pylint: disable=abstract-method
-    def execute(self, task):
-        task.status = task.SUCCESS
+    def execute(self, ctx, *args, **kwargs):
+        with ctx.persist_changes:
+            ctx.task.status = ctx.task.SUCCESS

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index 72080b4..9d86125 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -18,37 +18,36 @@ Dry executor
 """
 from datetime import datetime
 
-from .base import BaseExecutor
+from . import base
 
 
-class DryExecutor(BaseExecutor):                                                                    # pylint: disable=abstract-method
+class DryExecutor(base.BaseExecutor):                                                                    # pylint: disable=abstract-method
     """
     Executor which dry runs tasks - prints task information without causing any side effects
     """
-    def execute(self, task):
-        # updating the task manually instead of calling self._task_started(task),
-        # to avoid any side effects raising that event might cause
-        with task._update():
-            task.started_at = datetime.utcnow()
-            task.status = task.STARTED
-
-        dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
-        logger = task.context.logger.info if task.function else task.context.logger.debug
-
-        if hasattr(task.actor, 'source_node'):
-            name = '{source_node.name}->{target_node.name}'.format(
-                source_node=task.actor.source_node, target_node=task.actor.target_node)
-        else:
-            name = task.actor.name
-
-        if task.function:
-            logger(dry_msg.format(name=name, task=task, suffix='started...'))
-            logger(dry_msg.format(name=name, task=task, suffix='successful'))
-        else:
-            logger(dry_msg.format(name=name, task=task, suffix='has no implementation'))
-
-        # updating the task manually instead of calling self._task_succeeded(task),
-        # to avoid any side effects raising that event might cause
-        with task._update():
-            task.ended_at = datetime.utcnow()
-            task.status = task.SUCCESS
+    def execute(self, ctx):
+        with ctx.persist_changes:
+            # updating the task manually instead of calling self._task_started(task),
+            # to avoid any side effects raising that event might cause
+            ctx.task.started_at = datetime.utcnow()
+            ctx.task.status = ctx.task.STARTED
+
+            dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
+            logger = ctx.logger.info if ctx.task.function else ctx.logger.debug
+
+            if hasattr(ctx.task.actor, 'source_node'):
+                name = '{source_node.name}->{target_node.name}'.format(
+                    source_node=ctx.task.actor.source_node, target_node=ctx.task.actor.target_node)
+            else:
+                name = ctx.task.actor.name
+
+            if ctx.task.function:
+                logger(dry_msg.format(name=name, task=ctx.task, suffix='started...'))
+                logger(dry_msg.format(name=name, task=ctx.task, suffix='successful'))
+            else:
+                logger(dry_msg.format(name=name, task=ctx.task, suffix='has no implementation'))
+
+            # updating the task manually instead of calling self._task_succeeded(task),
+            # to avoid any side effects raising that event might cause
+            ctx.task.ended_at = datetime.utcnow()
+            ctx.task.status = ctx.task.SUCCESS

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 634f1f2..8518b33 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -113,17 +113,17 @@ class ProcessExecutor(base.BaseExecutor):
         self._server_socket.close()
         self._listener_thread.join(timeout=60)
 
-    def _execute(self, task):
+    def _execute(self, ctx):
         self._check_closed()
-        self._tasks[task.id] = task
+        self._tasks[ctx.task.id] = ctx
 
         # Temporary file used to pass arguments to the started subprocess
         file_descriptor, arguments_json_path = tempfile.mkstemp(prefix='executor-', suffix='.json')
         os.close(file_descriptor)
         with open(arguments_json_path, 'wb') as f:
-            f.write(pickle.dumps(self._create_arguments_dict(task)))
+            f.write(pickle.dumps(self._create_arguments_dict(ctx)))
 
-        env = self._construct_subprocess_env(task=task)
+        env = self._construct_subprocess_env(task=ctx.task)
         # Asynchronously start the operation in a subprocess
         subprocess.Popen(
             '{0} {1} {2}'.format(sys.executable, __file__, arguments_json_path),
@@ -137,13 +137,13 @@ class ProcessExecutor(base.BaseExecutor):
         if self._stopped:
             raise RuntimeError('Executor closed')
 
-    def _create_arguments_dict(self, task):
+    def _create_arguments_dict(self, ctx):
         return {
-            'task_id': task.id,
-            'function': task.function,
-            'operation_arguments': dict(arg.unwrapped for arg in task.arguments.values()),
+            'task_id': ctx.task.id,
+            'function': ctx.task.function,
+            'operation_arguments': dict(arg.unwrapped for arg in ctx.task.arguments.values()),
             'port': self._server_port,
-            'context': task.context.serialization_dict,
+            'context': ctx.serialization_dict,
         }
 
     def _construct_subprocess_env(self, task):

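The process executor keeps its hand-off mechanism: arguments are pickled into a temp file whose path is handed to a fresh interpreter; this change only swaps the payload source from the core task to the context. A condensed, runnable sketch of that file-based hand-off (the payload dict is hypothetical):

    import os, pickle, subprocess, sys, tempfile

    payload = {'task_id': 1, 'operation_arguments': {}}

    file_descriptor, path = tempfile.mkstemp(prefix='executor-', suffix='.json')
    os.close(file_descriptor)
    with open(path, 'wb') as f:
        f.write(pickle.dumps(payload))

    # the child process reads the same file back:
    subprocess.check_call([
        sys.executable, '-c',
        'import pickle, sys; print(pickle.load(open(sys.argv[1], "rb")))',
        path])
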
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index 56a56a5..8c447b6 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -46,8 +46,8 @@ class ThreadExecutor(BaseExecutor):
             thread.start()
             self._pool.append(thread)
 
-    def _execute(self, task):
-        self._queue.put(task)
+    def _execute(self, ctx):
+        self._queue.put(ctx)
 
     def close(self):
         self._stopped = True
@@ -57,15 +57,15 @@ class ThreadExecutor(BaseExecutor):
     def _processor(self):
         while not self._stopped:
             try:
-                task = self._queue.get(timeout=1)
-                self._task_started(task)
+                ctx = self._queue.get(timeout=1)
+                self._task_started(ctx)
                 try:
-                    task_func = imports.load_attribute(task.function)
-                    arguments = dict(arg.unwrapped for arg in task.arguments.values())
-                    task_func(ctx=task.context, **arguments)
-                    self._task_succeeded(task)
+                    task_func = imports.load_attribute(ctx.task.function)
+                    arguments = dict(arg.unwrapped for arg in ctx.task.arguments.values())
+                    task_func(ctx=ctx, **arguments)
+                    self._task_succeeded(ctx)
                 except BaseException as e:
-                    self._task_failed(task,
+                    self._task_failed(ctx,
                                       exception=e,
                                       traceback=exceptions.get_exception_as_string(*sys.exc_info()))
             # Daemon threads

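The thread executor change is mechanical (contexts instead of core tasks travel through the queue); the underlying pattern is the usual daemon-worker loop polling a queue with a timeout so shutdown is noticed. A self-contained sketch of that pattern, with hypothetical names:

    import threading
    try:
        import queue                     # Python 3
    except ImportError:
        import Queue as queue            # Python 2

    work = queue.Queue()
    stopped = False

    def _processor():
        while not stopped:
            try:
                ctx = work.get(timeout=1)   # wake periodically to re-check stopped
            except queue.Empty:
                continue
            ctx()                           # stand-in for running the task function

    worker = threading.Thread(target=_processor)
    worker.daemon = True
    worker.start()

    work.put(lambda: None)               # enqueue a no-op "task"
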
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/__init__.py
----------------------------------------------------------------------
diff --git a/tests/__init__.py b/tests/__init__.py
index d2858d2..ace30c8 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -15,4 +15,6 @@
 
 import os
 
+from . import storage, mock
+
 ROOT_DIR = os.path.dirname(os.path.dirname(__file__))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/context/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/__init__.py b/tests/orchestrator/context/__init__.py
index 4fde0a7..086a066 100644
--- a/tests/orchestrator/context/__init__.py
+++ b/tests/orchestrator/context/__init__.py
@@ -15,7 +15,7 @@
 
 import sys
 
-from aria.orchestrator.workflows.core import engine
+from aria.orchestrator.workflows.core import engine, compile
 
 
 def op_path(func, module_path=None):
@@ -25,5 +25,8 @@ def op_path(func, module_path=None):
 
 def execute(workflow_func, workflow_context, executor):
     graph = workflow_func(ctx=workflow_context)
-    eng = engine.Engine(executor=executor, workflow_context=workflow_context, tasks_graph=graph)
-    eng.execute()
+
+    compile.create_execution_tasks(workflow_context, graph, executor.__class__)
+    eng = engine.Engine(executors={executor.__class__: executor})
+
+    eng.execute(workflow_context)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 3dcfaa2..9550d12 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -50,21 +50,12 @@ def ctx(tmpdir):
 
 
 @pytest.fixture
-def process_executor():
-    ex = process.ProcessExecutor(**dict(python_path=tests.ROOT_DIR))
-    try:
-        yield ex
-    finally:
-        ex.close()
-
-
-@pytest.fixture
 def thread_executor():
-    ex = thread.ThreadExecutor()
+    result = thread.ThreadExecutor()
     try:
-        yield ex
+        yield result
     finally:
-        ex.close()
+        result.close()
 
 
 @pytest.fixture
@@ -266,12 +257,12 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
     (process.ProcessExecutor, {'python_path': [tests.ROOT_DIR]}),
 ])
 def executor(request):
-    executor_cls, executor_kwargs = request.param
-    result = executor_cls(**executor_kwargs)
+    ex_cls, kwargs = request.param
+    ex = ex_cls(**kwargs)
     try:
-        yield result
+        yield ex
     finally:
-        result.close()
+        ex.close()
 
 
 def test_node_operation_logging(ctx, executor):
@@ -304,7 +295,6 @@ def test_node_operation_logging(ctx, executor):
                 arguments=arguments
             )
         )
-
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
     _assert_loggins(ctx, arguments)
 
@@ -418,7 +408,7 @@ def _assert_loggins(ctx, arguments):
     assert len(executions) == 1
     execution = executions[0]
 
-    tasks = ctx.model.task.list()
+    tasks = ctx.model.task.list(filters={'_stub_type': None})
     assert len(tasks) == 1
     task = tasks[0]
     assert len(task.logs) == 4

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 0919e81..5db5b63 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -16,7 +16,7 @@
 import pytest
 
 from aria.orchestrator.workflows import api
-from aria.orchestrator.workflows.core import engine
+from aria.orchestrator.workflows.core import engine, compile
 from aria.orchestrator.workflows.executor import process
 from aria.orchestrator import workflow, operation
 import tests
@@ -48,15 +48,15 @@ def test_serialize_operation_context(context, executor, tmpdir):
     context.model.node.update(node)
 
     graph = _mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
-    eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
-    eng.execute()
+    compile.create_execution_tasks(context, graph, executor.__class__)
+    eng = engine.Engine({executor.__class__: executor})
+    eng.execute(context)
 
 
 @workflow
 def _mock_workflow(ctx, graph):
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    task = api.task.OperationTask(node, interface_name='test', operation_name='op')
-    graph.add_tasks(task)
+    graph.add_tasks(api.task.OperationTask(node, interface_name='test', operation_name='op'))
     return graph
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index f667460..1695320 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -28,7 +28,7 @@ from aria.orchestrator.execution_plugin.exceptions import ProcessException
 from aria.orchestrator.execution_plugin import local
 from aria.orchestrator.execution_plugin import constants
 from aria.orchestrator.workflows.executor import process
-from aria.orchestrator.workflows.core import engine
+from aria.orchestrator.workflows.core import engine, compile
 
 from tests import mock
 from tests import storage
@@ -500,11 +500,9 @@ if __name__ == '__main__':
                 arguments=arguments))
             return graph
         tasks_graph = mock_workflow(ctx=workflow_context)  # pylint: disable=no-value-for-parameter
-        eng = engine.Engine(
-            executor=executor,
-            workflow_context=workflow_context,
-            tasks_graph=tasks_graph)
-        eng.execute()
+        compile.create_execution_tasks(workflow_context, tasks_graph, executor.__class__)
+        eng = engine.Engine({executor.__class__: executor})
+        eng.execute(workflow_context)
         return workflow_context.model.node.get_by_name(
             mock.models.DEPENDENCY_NODE_NAME).attributes
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index 8c4dd2d..fb1dc09 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -29,7 +29,7 @@ from aria.orchestrator import events
 from aria.orchestrator import workflow
 from aria.orchestrator.workflows import api
 from aria.orchestrator.workflows.executor import process
-from aria.orchestrator.workflows.core import engine
+from aria.orchestrator.workflows.core import engine, compile
 from aria.orchestrator.workflows.exceptions import ExecutorException
 from aria.orchestrator.exceptions import TaskAbortException, TaskRetryException
 from aria.orchestrator.execution_plugin import operations
@@ -254,11 +254,10 @@ class TestWithActualSSHServer(object):
             graph.sequence(*ops)
             return graph
         tasks_graph = mock_workflow(ctx=self._workflow_context)  # pylint: disable=no-value-for-parameter
-        eng = engine.Engine(
-            executor=self._executor,
-            workflow_context=self._workflow_context,
-            tasks_graph=tasks_graph)
-        eng.execute()
+        compile.create_execution_tasks(
+            self._workflow_context, tasks_graph, self._executor.__class__)
+        eng = engine.Engine({self._executor.__class__: self._executor})
+        eng.execute(self._workflow_context)
         return self._workflow_context.model.node.get_by_name(
             mock.models.DEPENDENCY_NODE_NAME).attributes
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/test_workflow_runner.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/test_workflow_runner.py b/tests/orchestrator/test_workflow_runner.py
index c5a62ae..40f9035 100644
--- a/tests/orchestrator/test_workflow_runner.py
+++ b/tests/orchestrator/test_workflow_runner.py
@@ -96,20 +96,20 @@ def test_default_executor(request):
     # validates the ProcessExecutor is used by the workflow runner by default
     mock_workflow = _setup_mock_workflow_in_service(request)
 
-    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+    with mock.patch('aria.orchestrator.workflow_runner.engine.Engine') as mock_engine_cls:
         _create_workflow_runner(request, mock_workflow)
         _, engine_kwargs = mock_engine_cls.call_args
-        assert isinstance(engine_kwargs.get('executor'), ProcessExecutor)
+        assert isinstance(engine_kwargs.get('executors').values()[0], ProcessExecutor)
 
 
 def test_custom_executor(request):
     mock_workflow = _setup_mock_workflow_in_service(request)
 
     custom_executor = mock.MagicMock()
-    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+    with mock.patch('aria.orchestrator.workflow_runner.engine.Engine') as mock_engine_cls:
         _create_workflow_runner(request, mock_workflow, executor=custom_executor)
         _, engine_kwargs = mock_engine_cls.call_args
-        assert engine_kwargs.get('executor') == custom_executor
+        assert engine_kwargs.get('executors').values()[0] == custom_executor
 
 
 def test_task_configuration_parameters(request):
@@ -117,48 +117,47 @@ def test_task_configuration_parameters(request):
 
     task_max_attempts = 5
     task_retry_interval = 7
-    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+    with mock.patch('aria.orchestrator.workflow_runner.engine.Engine.execute') as \
+            mock_engine_execute:
         _create_workflow_runner(request, mock_workflow, task_max_attempts=task_max_attempts,
-                                task_retry_interval=task_retry_interval)
-        _, engine_kwargs = mock_engine_cls.call_args
-        assert engine_kwargs['workflow_context']._task_max_attempts == task_max_attempts
-        assert engine_kwargs['workflow_context']._task_retry_interval == task_retry_interval
+                                task_retry_interval=task_retry_interval).execute()
+        _, engine_kwargs = mock_engine_execute.call_args
+        assert engine_kwargs['ctx']._task_max_attempts == task_max_attempts
+        assert engine_kwargs['ctx']._task_retry_interval == task_retry_interval
 
 
 def test_execute(request, service):
     mock_workflow = _setup_mock_workflow_in_service(request)
 
     mock_engine = mock.MagicMock()
-    with mock.patch('aria.orchestrator.workflow_runner.Engine', return_value=mock_engine) \
-            as mock_engine_cls:
+    with mock.patch('aria.orchestrator.workflow_runner.engine.Engine.execute',
+                    return_value=mock_engine) as mock_engine_execute:
         workflow_runner = _create_workflow_runner(request, mock_workflow)
+        workflow_runner.execute()
 
-        _, engine_kwargs = mock_engine_cls.call_args
-        assert engine_kwargs['workflow_context'].service.id == service.id
-        assert engine_kwargs['workflow_context'].execution.workflow_name == 'test_workflow'
+        _, engine_kwargs = mock_engine_execute.call_args
+        assert engine_kwargs['ctx'].service.id == service.id
+        assert engine_kwargs['ctx'].execution.workflow_name == 'test_workflow'
 
-        workflow_runner.execute()
-        mock_engine.execute.assert_called_once_with()
+        mock_engine_execute.assert_called_once_with(ctx=workflow_runner._workflow_context)
 
 
 def test_cancel_execution(request):
     mock_workflow = _setup_mock_workflow_in_service(request)
 
     mock_engine = mock.MagicMock()
-    with mock.patch('aria.orchestrator.workflow_runner.Engine', return_value=mock_engine):
+    with mock.patch('aria.orchestrator.workflow_runner.engine.Engine', return_value=mock_engine):
         workflow_runner = _create_workflow_runner(request, mock_workflow)
         workflow_runner.cancel()
-        mock_engine.cancel_execution.assert_called_once_with()
+        mock_engine.cancel_execution.assert_called_once_with(ctx=workflow_runner._workflow_context)
 
 
 def test_execution_model_creation(request, service, model):
     mock_workflow = _setup_mock_workflow_in_service(request)
 
-    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+    with mock.patch('aria.orchestrator.workflow_runner.engine.Engine'):
         workflow_runner = _create_workflow_runner(request, mock_workflow)
 
-        _, engine_kwargs = mock_engine_cls.call_args
-        assert engine_kwargs['workflow_context'].execution == workflow_runner.execution
         assert model.execution.get(workflow_runner.execution.id) == workflow_runner.execution
         assert workflow_runner.execution.service.id == service.id
         assert workflow_runner.execution.workflow_name == mock_workflow
@@ -173,7 +172,7 @@ def test_execution_inputs_override_workflow_inputs(request):
         inputs=dict((name, models.Input.wrap(name, val)) for name, val
                     in wf_inputs.iteritems()))
 
-    with mock.patch('aria.orchestrator.workflow_runner.Engine'):
+    with mock.patch('aria.orchestrator.workflow_runner.engine.Engine'):
         workflow_runner = _create_workflow_runner(
             request, mock_workflow, inputs={'input2': 'overriding-value2', 'input3': 7})
 

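Two details in the test_workflow_runner.py changes are worth calling out. The
patch target moves from `aria.orchestrator.workflow_runner.Engine` to
`aria.orchestrator.workflow_runner.engine.Engine`, implying the runner now
references the Engine through its module rather than a directly imported name.
And the inspected keyword argument becomes an `executors` mapping, read with
`.values()[0]` -- a Python 2 idiom, since dict views are not indexable on
Python 3, where the equivalent lookup would read roughly like this:

    # Python 2, as written in the test:
    executor = engine_kwargs.get('executors').values()[0]
    # Python 3 equivalent (shown only for contrast):
    executor = next(iter(engine_kwargs.get('executors').values()))
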
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 0438544..b77d284 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -28,7 +28,7 @@ from aria.orchestrator.workflows import (
     api,
     exceptions,
 )
-from aria.orchestrator.workflows.core import engine
+from aria.orchestrator.workflows.core import engine, compile
 from aria.orchestrator.workflows.executor import thread
 
 from tests import mock, storage
@@ -44,15 +44,15 @@ class BaseTest(object):
         eng = cls._engine(workflow_func=workflow_func,
                           workflow_context=workflow_context,
                           executor=executor)
-        eng.execute()
+        eng.execute(ctx=workflow_context)
         return eng
 
     @staticmethod
     def _engine(workflow_func, workflow_context, executor):
         graph = workflow_func(ctx=workflow_context)
-        return engine.Engine(executor=executor,
-                             workflow_context=workflow_context,
-                             tasks_graph=graph)
+        compile.create_execution_tasks(workflow_context, graph, executor.__class__)
+
+        return engine.Engine(executors={executor.__class__: executor})
 
     @staticmethod
     def _create_interface(ctx, func, arguments=None):
@@ -98,9 +98,10 @@ class BaseTest(object):
 
     @pytest.fixture(autouse=True)
     def signals_registration(self, ):
-        def sent_task_handler(*args, **kwargs):
-            calls = global_test_holder.setdefault('sent_task_signal_calls', 0)
-            global_test_holder['sent_task_signal_calls'] = calls + 1
+        def sent_task_handler(ctx, *args, **kwargs):
+            if ctx.task._stub_type is None:
+                calls = global_test_holder.setdefault('sent_task_signal_calls', 0)
+                global_test_holder['sent_task_signal_calls'] = calls + 1
 
         def start_workflow_handler(workflow_context, *args, **kwargs):
             workflow_context.states.append('start')
@@ -255,10 +256,10 @@ class TestCancel(BaseTest):
         eng = self._engine(workflow_func=mock_workflow,
                            workflow_context=workflow_context,
                            executor=executor)
-        t = threading.Thread(target=eng.execute)
+        t = threading.Thread(target=eng.execute, kwargs=dict(ctx=workflow_context))
         t.start()
         time.sleep(10)
-        eng.cancel_execution()
+        eng.cancel_execution(workflow_context)
         t.join(timeout=60) # we need to give this a *lot* of time because Travis can be *very* slow
         assert not t.is_alive() # if join is timed out it will not raise an exception
         assert workflow_context.states == ['start', 'cancel']
@@ -277,7 +278,7 @@ class TestCancel(BaseTest):
         eng = self._engine(workflow_func=mock_workflow,
                            workflow_context=workflow_context,
                            executor=executor)
-        eng.cancel_execution()
+        eng.cancel_execution(workflow_context)
         execution = workflow_context.execution
         assert execution.status == models.Execution.CANCELLED
 

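The test_engine.py changes make the decoupling explicit: `execute()` and
`cancel_execution()` now take the workflow context as an argument, so an
Engine instance is no longer tied to a single workflow, and signal handlers
must filter stub tasks themselves since stubs now flow through the same
events. A small sketch of what the new signatures appear to permit -- an
inference from the API shape, not something these tests exercise (the two
context names are placeholders):

    from aria.orchestrator.workflows.core import engine
    from aria.orchestrator.workflows.executor import thread

    executor = thread.ThreadExecutor()
    eng = engine.Engine(executors={thread.ThreadExecutor: executor})
    # Both contexts are placeholders; their graphs are assumed to have been
    # compiled with ThreadExecutor via compile.create_execution_tasks.
    eng.execute(ctx=first_workflow_context)
    eng.execute(ctx=second_workflow_context)
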
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/core/test_events.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_events.py b/tests/orchestrator/workflows/core/test_events.py
index 6d542e9..2b82443 100644
--- a/tests/orchestrator/workflows/core/test_events.py
+++ b/tests/orchestrator/workflows/core/test_events.py
@@ -15,12 +15,13 @@
 
 import pytest
 
-from tests import mock, storage
-from aria.modeling.service_instance import NodeBase
 from aria.orchestrator.decorators import operation, workflow
-from aria.orchestrator.workflows.core import engine
+from aria.orchestrator.workflows.core import engine, compile
 from aria.orchestrator.workflows.executor.thread import ThreadExecutor
 from aria.orchestrator.workflows import api
+from aria.modeling.service_instance import NodeBase
+
+from tests import mock, storage
 
 global_test_dict = {}  # used to capture transitional node state changes
 
@@ -112,14 +113,13 @@ def run_operation_on_node(ctx, op_name, interface_name):
         operation_name=op_name,
         operation_kwargs=dict(function='{name}.{func.__name__}'.format(name=__name__, func=func)))
     node.interfaces[interface.name] = interface
+    compile.create_execution_tasks(
+        ctx,
+        single_operation_workflow(ctx, node=node, interface_name=interface_name, op_name=op_name),
+        ThreadExecutor)
 
-    eng = engine.Engine(executor=ThreadExecutor(),
-                        workflow_context=ctx,
-                        tasks_graph=single_operation_workflow(ctx=ctx,  # pylint: disable=no-value-for-parameter
-                                                              node=node,
-                                                              interface_name=interface_name,
-                                                              op_name=op_name))
-    eng.execute()
+    eng = engine.Engine(executors={ThreadExecutor: ThreadExecutor()})
+    eng.execute(ctx)
     return node
 
 

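test_events.py shows the single-executor form of the same pattern: the
`executors` mapping is keyed by the executor class itself,
`{ThreadExecutor: ThreadExecutor()}`, matching the class passed to
`compile.create_execution_tasks` -- presumably how the engine resolves a live
executor for each compiled task. In test_task.py, below, the last piece of the
old core layer goes away: the `core.task.OperationTask` wrapper is replaced by
converting an API task directly into a persisted model task, along the lines
of this sketch (names as in the diff; the second argument, passed as `None` in
the tests, appears to be the executor):

    from aria.modeling import models

    # Inside a pushed workflow context, an API-level operation task is
    # turned straight into a storage-model task; no core wrapper remains.
    model_task = models.Task.from_api_task(api_task, None)
    assert model_task.name == api_task.name
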
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1fee85c4/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index c0d3616..2b3f7d7 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -22,9 +22,9 @@ import pytest
 from aria.orchestrator.context import workflow as workflow_context
 from aria.orchestrator.workflows import (
     api,
-    core,
     exceptions,
 )
+from aria.modeling import models
 
 from tests import mock, storage
 
@@ -70,8 +70,8 @@ class TestOperationTask(object):
                 node,
                 interface_name=NODE_INTERFACE_NAME,
                 operation_name=NODE_OPERATION_NAME)
-            core_task = core.task.OperationTask(api_task=api_task, executor=None)
-        return api_task, core_task
+            model_task = models.Task.from_api_task(api_task, None)
+        return api_task, model_task
 
     def _create_relationship_operation_task(self, ctx, relationship):
         with workflow_context.current.push(ctx):
@@ -79,7 +79,7 @@ class TestOperationTask(object):
                 relationship,
                 interface_name=RELATIONSHIP_INTERFACE_NAME,
                 operation_name=RELATIONSHIP_OPERATION_NAME)
-            core_task = core.task.OperationTask(api_task=api_task, executor=None)
+            core_task = models.Task.from_api_task(api_task, None)
         return api_task, core_task
 
     def test_node_operation_task_creation(self, ctx):
@@ -96,25 +96,21 @@ class TestOperationTask(object):
         )
         node.interfaces[interface.name] = interface
         ctx.model.node.update(node)
-        api_task, core_task = self._create_node_operation_task(ctx, node)
-        storage_task = ctx.model.task.get_by_name(core_task.name)
-        assert storage_task.plugin is storage_plugin
-        assert storage_task.execution_name == ctx.execution.name
-        assert storage_task.actor == core_task.context.node
-        assert core_task.model_task == storage_task
-        assert core_task.name == api_task.name
-        assert core_task.function == api_task.function
-        assert core_task.actor == api_task.actor == node
-        assert core_task.arguments == api_task.arguments == storage_task.arguments
-        assert core_task.plugin == storage_plugin
+        api_task, model_task = self._create_node_operation_task(ctx, node)
+        assert model_task.name == api_task.name
+        assert model_task.function == api_task.function
+        assert model_task.actor == api_task.actor == node
+        assert model_task.arguments == api_task.arguments
+        assert model_task.plugin == storage_plugin
 
     def test_relationship_operation_task_creation(self, ctx):
         relationship = ctx.model.relationship.list()[0]
         ctx.model.relationship.update(relationship)
-        _, core_task = self._create_relationship_operation_task(
+        _, model_task = self._create_relationship_operation_task(
             ctx, relationship)
-        assert core_task.model_task.actor == relationship
+        assert model_task.actor == relationship
 
+    @pytest.mark.skip("Currently not supported for model tasks")
     def test_operation_task_edit_locked_attribute(self, ctx):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
 
@@ -131,6 +127,7 @@ class TestOperationTask(object):
         with pytest.raises(exceptions.TaskException):
             core_task.due_at = now
 
+    @pytest.mark.skip("Currently not supported for model tasks")
     def test_operation_task_edit_attributes(self, ctx):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)