You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@cloudstack.apache.org by ed...@apache.org on 2014/09/17 03:06:08 UTC

[2/3] git commit: updated refs/heads/pytest to 797fff1

add xdist


Project: http://git-wip-us.apache.org/repos/asf/cloudstack/repo
Commit: http://git-wip-us.apache.org/repos/asf/cloudstack/commit/145542d6
Tree: http://git-wip-us.apache.org/repos/asf/cloudstack/tree/145542d6
Diff: http://git-wip-us.apache.org/repos/asf/cloudstack/diff/145542d6

Branch: refs/heads/pytest
Commit: 145542d662518266c8af86e59f70fed6eafa3c18
Parents: 9610685
Author: Edison Su <su...@gmail.com>
Authored: Tue Sep 16 11:45:55 2014 -0700
Committer: Edison Su <su...@gmail.com>
Committed: Tue Sep 16 11:45:55 2014 -0700

----------------------------------------------------------------------
 test/integration/smoke/test_CS-18306.py         |   8 +
 .../marvin/pytest/pytest_marvin_plugin.py       |  10 +-
 tools/pytest-xdist/CHANGELOG                    | 134 +++++
 tools/pytest-xdist/ISSUES.txt                   |  31 ++
 tools/pytest-xdist/LICENSE                      |  19 +
 tools/pytest-xdist/MANIFEST.in                  |   7 +
 tools/pytest-xdist/README.txt                   | 212 ++++++++
 tools/pytest-xdist/example/boxed.txt            |  62 +++
 tools/pytest-xdist/setup.py                     |  30 ++
 tools/pytest-xdist/testing/acceptance_test.py   | 486 +++++++++++++++++++
 tools/pytest-xdist/testing/conftest.py          |  36 ++
 tools/pytest-xdist/testing/test_boxed.py        |  56 +++
 tools/pytest-xdist/testing/test_dsession.py     | 240 +++++++++
 tools/pytest-xdist/testing/test_looponfail.py   | 280 +++++++++++
 tools/pytest-xdist/testing/test_plugin.py       |  70 +++
 tools/pytest-xdist/testing/test_remote.py       | 248 ++++++++++
 tools/pytest-xdist/testing/test_slavemanage.py  | 241 +++++++++
 tools/pytest-xdist/tox.ini                      |  32 ++
 tools/pytest-xdist/xdist/__init__.py            |   2 +
 tools/pytest-xdist/xdist/dsession.py            | 460 ++++++++++++++++++
 tools/pytest-xdist/xdist/looponfail.py          | 230 +++++++++
 tools/pytest-xdist/xdist/newhooks.py            |  21 +
 tools/pytest-xdist/xdist/plugin.py              | 131 +++++
 tools/pytest-xdist/xdist/remote.py              | 147 ++++++
 tools/pytest-xdist/xdist/slavemanage.py         | 316 ++++++++++++
 25 files changed, 3505 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/test/integration/smoke/test_CS-18306.py
----------------------------------------------------------------------
diff --git a/test/integration/smoke/test_CS-18306.py b/test/integration/smoke/test_CS-18306.py
index 416cb5e..cb555ee 100644
--- a/test/integration/smoke/test_CS-18306.py
+++ b/test/integration/smoke/test_CS-18306.py
@@ -17,6 +17,14 @@
 # under the License.
 import pytest
 
+'''
 @pytest.mark.tags(tags=["advanced"], required_hardware="false")
 def test_01_create_disk_offering(vm):
     assert vm is not None
+'''
+
+def test_a():
+    assert True == True
+
+def test_b():
+    assert True == True
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/marvin/marvin/pytest/pytest_marvin_plugin.py
----------------------------------------------------------------------
diff --git a/tools/marvin/marvin/pytest/pytest_marvin_plugin.py b/tools/marvin/marvin/pytest/pytest_marvin_plugin.py
index b5a5ae4..abe844b 100644
--- a/tools/marvin/marvin/pytest/pytest_marvin_plugin.py
+++ b/tools/marvin/marvin/pytest/pytest_marvin_plugin.py
@@ -16,7 +16,6 @@
 # under the License.
 import pytest
 import os
-import distutils
 
 from marvin.utils import initTestClass,getMarvin
 from .VM import (vm,tiny_service_offering,template,test_client,account,domain,zone)
@@ -25,9 +24,6 @@ def pytest_configure(config):
     config.addinivalue_line("markers",
         "tags(name): tag tests")
 
-    result = getMarvin()
-    if result is None:
-        pytest.fail("failed to init marvin plugin")
     marvin_init_tags()
 
 g_marvin_filter = {
@@ -66,6 +62,12 @@ def pytest_runtest_setup(item):
         if found is not True:
             pytest.skip("doesn't match tags")
 
+@pytest.fixture(scope="session", autouse=True)
+def marvin_init_session():
+    result = getMarvin()
+    if result is None:
+        pytest.fail("failed to init marvin plugin")
+
 @pytest.fixture(scope="class", autouse=True)
 def marvin_inject_testclass(request):
     if request.cls is None:

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/CHANGELOG
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/CHANGELOG b/tools/pytest-xdist/CHANGELOG
new file mode 100644
index 0000000..4e20fce
--- /dev/null
+++ b/tools/pytest-xdist/CHANGELOG
@@ -0,0 +1,134 @@
+1.11
+-------------------------
+
+- fix pytest/xdist issue485 (also depends on py-1.4.22):
+  attach stdout/stderr on --boxed processes that die.
+
+- fix pytest/xdist issue503: make sure that a node has usually
+  two items to execute to avoid scoped fixtures to be torn down
+  pre-maturely (fixture teardown/setup is "nextitem" sensitive).
+  Thanks to Andreas Pelme for bug analysis and failing test.
+
+1.10
+-------------------------
+
+- add glob support for rsyncignores, add command line option to pass
+  additional rsyncignores. Thanks Anatoly Bubenkov.
+
+- fix pytest issue382 - produce "pytest_runtest_logstart" event again
+  in master. Thanks Aron Curzon.
+
+- fix pytest issue419 by sending/receiving indices into the test 
+  collection instead of node ids (which are not necessarily unique
+  for functions parametrized with duplicate values)
+
+- send multiple "to test" indices in one network message to a slave
+  and improve heuristics for sending chunks where the chunksize
+  depends on the number of remaining tests rather than fixed numbers.
+  This reduces the number of master -> node messages (but not the
+  reverse direction)
+
+
+1.9
+-------------------------
+
+- changed LICENSE to MIT
+
+- fix duplicate reported test ids with --looponfailing
+  (thanks Jeremy Thurgood)
+
+- fix pytest issue41: re-run tests on all file changes, not just
+  randomly select ones like .py/.c.
+
+- fix pytest issue347: slaves running on top of Python3.2
+  will set PYTHONDONTWRITEBYTECODE to 1 to avoid import concurrency
+  bugs.
+
+1.8
+-------------------------
+
+- fix pytest-issue93 - use the refined pytest-2.2.1 runtestprotocol
+  interface to perform eager teardowns for test items.
+
+1.7
+-------------------------
+
+- fix incompatibilities with pytest-2.2.0 (allow multiple
+  pytest_runtest_logreport reports for a test item)
+
+1.6
+-------------------------
+
+- terser collection reporting
+
+- fix issue34 - distributed testing with -p plugin now works correctly
+
+- fix race condition in looponfail mode where a concurrent file removal
+  could cause a crash
+
+1.5
+-------------------------
+
+- adapt to and require pytest-2.0 changes, rsyncdirs and rsyncignore can now
+  only be specified in [pytest] sections of ini files, see "py.test -h"
+  for details.
+- major internal refactoring to match the pytest-2.0 event refactoring
+  - perform test collection always at slave side instead of at the master
+  - make python2/python3 bridging work, remove usage of pickling
+- improve initial reporting by using line-rewriting
+- remove all trailing whitespace from source
+
+1.4
+-------------------------
+
+- perform distributed testing related reporting in the plugin
+  rather than having dist-related code in the generic py.test
+  distribution
+
+- depend on execnet-1.0.7 which adds "env1:NAME=value" keys to
+  gateway specification strings.
+
+- show detailed gateway setup and platform information only when
+  "-v" or "--verbose" is specified.
+
+1.3
+-------------------------
+
+- fix --looponfailing - it would not actually run against the fully changed
+  source tree when initial conftest files load application state.
+
+- adapt for py-1.3.1's new --maxfailure option
+
+1.2
+-------------------------
+
+- fix issue79: sessionfinish/teardown hooks are now called systematically
+  on the slave side
+- introduce a new data input/output mechanism to allow the master side
+  to send and receive data from a slave.
+- fix race condition in underlying pickling/unpickling handling
+- use and require new register hooks facility of py.test>=1.3.0
+- require improved execnet>=1.0.6 because of various race conditions
+  that can arise in xdist testing modes.
+- fix some python3 related pickling related race conditions
+- fix PyPI description
+
+1.1
+-------------------------
+
+- fix an indefinite hang which would wait for events although no events
+  are pending - this happened if items arrive very quickly while
+  the "reschedule-event" tried unconditionally avoiding a busy-loop
+  and not schedule new work.
+
+1.0
+-------------------------
+
+- moved code out of py-1.1.1 into its own plugin
+- use a new, faster and more sensible model to do load-balancing
+  of tests - now no magic "MAXITEMSPERHOST" is needed and load-testing
+  works effectively even with very few tests.
+- cleaned up termination handling
+- make -x cause hard killing of test nodes to decrease wait time
+  until the traceback shows up on first failure
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/ISSUES.txt
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/ISSUES.txt b/tools/pytest-xdist/ISSUES.txt
new file mode 100644
index 0000000..7fe4579
--- /dev/null
+++ b/tools/pytest-xdist/ISSUES.txt
@@ -0,0 +1,31 @@
+next release critical
+-----------------------------------------------
+tag: bug
+
+miserably fails: --dist=each --tx popen --tx socket=...
+
+
+rename / hooks
+-----------------------------------------------
+tag: bug
+
+node -> slave
+transition for hooks?
+configure_node -> configure_slave
+
+allow to remotely run xdist tests with xdist
+-----------------------------------------------
+tag: feature
+
+allow to run xdist own tests using its own mechanism.
+currently this doesn't work because the remote side
+has no py.test plugin.   How to configure/do
+register "xdist.plugin" on the remote side?
+
+see to avoid any "from _pytest" internal imports
+-----------------------------------------------
+tag: feature
+
+currently tests and even xdist core code imports
+names from the internal _pytest namespace.
+See to avoid it.

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/LICENSE
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/LICENSE b/tools/pytest-xdist/LICENSE
new file mode 100644
index 0000000..31ecdfb
--- /dev/null
+++ b/tools/pytest-xdist/LICENSE
@@ -0,0 +1,19 @@
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+     
+  The above copyright notice and this permission notice shall be included in all
+  copies or substantial portions of the Software.
+ 
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+  SOFTWARE.
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/MANIFEST.in
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/MANIFEST.in b/tools/pytest-xdist/MANIFEST.in
new file mode 100644
index 0000000..cde85f9
--- /dev/null
+++ b/tools/pytest-xdist/MANIFEST.in
@@ -0,0 +1,7 @@
+include CHANGELOG
+include LICENSE 
+include README.txt
+include setup.py
+include tox.ini
+graft testing
+prune .hg 

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/README.txt
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/README.txt b/tools/pytest-xdist/README.txt
new file mode 100644
index 0000000..39e76e8
--- /dev/null
+++ b/tools/pytest-xdist/README.txt
@@ -0,0 +1,212 @@
+xdist: pytest distributed testing plugin
+===============================================================
+
+The `pytest-xdist`_ plugin extends py.test with some unique
+test execution modes:
+
+* test run parallelization_: if you have multiple CPUs or hosts you can use
+  those for a combined test run.  This allows to speed up
+  development or to use special resources of `remote machines`_.
+
+* ``--boxed``: (not available on Windows) run each test in a boxed_
+  subprocess to survive ``SEGFAULTS`` or otherwise dying processes
+
+* ``--looponfail``: run your tests repeatedly in a subprocess.  After each run
+  py.test waits until a file in your project changes and then re-runs
+  the previously failing tests.  This is repeated until all tests pass
+  after which again a full run is performed.
+
+* `Multi-Platform`_ coverage: you can specify different Python interpreters
+  or different platforms and run tests in parallel on all of them.
+
+Before running tests remotely, ``py.test`` efficiently "rsyncs" your
+program source code to the remote place.  All test results
+are reported back and displayed to your local terminal.
+You may specify different Python versions and interpreters.
+
+
+Installation
+-----------------------
+
+Install the plugin with::
+
+    easy_install pytest-xdist
+
+    # or
+
+    pip install pytest-xdist
+
+or use the package in develop/in-place mode with
+a checkout of the `pytest-xdist repository`_ ::
+
+    python setup.py develop
+
+Usage examples
+---------------------
+
+.. _parallelization:
+
+Speed up test runs by sending tests to multiple CPUs
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+To send tests to multiple CPUs, type::
+
+    py.test -n NUM
+
+Especially for longer running tests or tests requiring
+a lot of IO this can lead to considerable speed ups.
+
+
+Running tests in a Python subprocess
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+To instantiate a python2.5 sub process and send tests to it, you may type::
+
+    py.test -d --tx popen//python=python2.5
+
+This will start a subprocess which is run with the "python2.5"
+Python interpreter, found in your system binary lookup path.
+
+If you prefix the --tx option value like this::
+
+    --tx 3*popen//python=python2.5
+
+then three subprocesses would be created and tests
+will be load-balanced across these three processes.
+
+.. _boxed:
+
+Running tests in a boxed subprocess
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+If you have tests involving C or C++ libraries you might have to deal
+with tests crashing the process.  For this case you may use the boxing
+options::
+
+    py.test --boxed
+
+which will run each test in a subprocess and will report if a test
+crashed the process.  You can also combine this option with
+running multiple processes to speed up the test run and use your CPU cores::
+
+    py.test -n3 --boxed
+
+this would run 3 testing subprocesses in parallel which each
+create new boxed subprocesses for each test.
+
+
+.. _`remote machines`:
+
+Sending tests to remote SSH accounts
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Suppose you have a package ``mypkg`` which contains some
+tests that you can successfully run locally. And you
+have a ssh-reachable machine ``myhost``.  Then
+you can ad-hoc distribute your tests by typing::
+
+    py.test -d --tx ssh=myhostpopen --rsyncdir mypkg mypkg
+
+This will synchronize your ``mypkg`` package directory
+to a remote ssh account and then locally collect tests
+and send them to remote places for execution.
+
+You can specify multiple ``--rsyncdir`` directories
+to be sent to the remote side.
+
+**NOTE:** For py.test to collect and send tests correctly
+you not only need to make sure all code and tests
+directories are rsynced, but that any test (sub) directory
+also has an ``__init__.py`` file because internally
+py.test references tests as a fully qualified python
+module path.  **You will otherwise get strange errors**
+during setup of the remote side.
+
+You can specify multiple ``--rsyncignore`` glob-patterns
+to be ignored when files are sent to the remote side.
+There are also internal ignores: .*, *.pyc, *.pyo, *~
+Those you cannot override using rsyncignore command-line or
+ini-file option(s).
+
+
+Sending tests to remote Socket Servers
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Download the single-module `socketserver.py`_ Python program
+and run it like this::
+
+    python socketserver.py
+
+It will tell you that it starts listening on the default
+port.  You can now on your home machine specify this
+new socket host with something like this::
+
+    py.test -d --tx socket=192.168.1.102:8888 --rsyncdir mypkg mypkg
+
+
+.. _`atonce`:
+.. _`Multi-Platform`:
+
+
+Running tests on many platforms at once
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+The basic command to run tests on multiple platforms is::
+
+    py.test --dist=each --tx=spec1 --tx=spec2
+
+If you specify a windows host, an OSX host and a Linux
+environment this command will send each test to all
+platforms - and report back failures from all platforms
+at once.   The specifications strings use the `xspec syntax`_.
+
+.. _`xspec syntax`: http://codespeak.net/execnet/trunk/basics.html#xspec
+
+.. _`socketserver.py`: http://bitbucket.org/hpk42/execnet/raw/2af991418160/execnet/script/socketserver.py
+
+.. _`execnet`: http://codespeak.net/execnet
+
+Specifying test exec environments in an ini file
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+pytest (since version 2.0) supports ini-style configuration.
+You can for example make running with three subprocesses
+your default like this::
+
+    [pytest]
+    addopts = -n3
+
+You can also add default environments like this::
+
+    [pytest]
+    addopts = --tx ssh=myhost//python=python2.5 --tx ssh=myhost//python=python2.6
+
+and then just type::
+
+    py.test --dist=each
+
+to run tests in each of the environments.
+
+Specifying "rsync" dirs in an ini-file
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+In a ``tox.ini`` or ``setup.cfg`` file in your root project directory
+you may specify directories to include or to exclude in synchronisation::
+
+    [pytest]
+    rsyncdirs = . mypkg helperpkg
+    rsyncignore = .hg
+
+These directory specifications are relative to the directory
+where the configuration file was found.
+
+.. _`pytest-xdist`: http://pypi.python.org/pypi/pytest-xdist
+.. _`pytest-xdist repository`: http://bitbucket.org/hpk42/pytest-xdist
+.. _`pytest`: http://pytest.org
+
+Issue and Bug Tracker
+------------------------
+
+Please use the pytest issue tracker for bugs in this plugin, see https://bitbucket.org/hpk42/pytest/issues .
+
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/example/boxed.txt
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/example/boxed.txt b/tools/pytest-xdist/example/boxed.txt
new file mode 100644
index 0000000..af3b025
--- /dev/null
+++ b/tools/pytest-xdist/example/boxed.txt
@@ -0,0 +1,62 @@
+
+
+If your testing involves C or C++ libraries you might have to deal
+with crashing processes.  The xdist-plugin provides the ``--boxed`` option
+to run each test in a controlled subprocess.  Here is a basic example::
+
+    # content of test_module.py
+
+    import pytest
+    import os
+    import time
+
+    # run test function 50 times with different argument
+    @pytest.mark.parametrize("arg", range(50))
+    def test_func(arg):
+        time.sleep(0.05) # each tests takes a while
+        if arg % 19 == 0: 
+            os.kill(os.getpid(), 15)
+
+If you run this with::
+
+    $ py.test --boxed
+    =========================== test session starts ============================
+    platform linux2 -- Python 2.7.3 -- pytest-2.3.0.dev8
+    plugins: xdist, bugzilla, cache, oejskit, cli, pep8, cov
+    collecting ... collected 50 items
+    
+    test_module.py f..................f..................f...........
+    
+    ================================= FAILURES =================================
+    _______________________________ test_func[0] _______________________________
+    /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15
+    ______________________________ test_func[19] _______________________________
+    /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15
+    ______________________________ test_func[38] _______________________________
+    /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15
+    =================== 3 failed, 47 passed in 3.41 seconds ====================
+
+You'll see that a couple of tests are reported as crashing, indicated
+by lower-case ``f`` and the respective failure summary. You can also use
+the xdist-provided parallelization feature to speed up your testing::
+
+    $ py.test --boxed -n3
+    =========================== test session starts ============================
+    platform linux2 -- Python 2.7.3 -- pytest-2.3.0.dev8
+    plugins: xdist, bugzilla, cache, oejskit, cli, pep8, cov
+    gw0 I / gw1 I / gw2 I
+    gw0 [50] / gw1 [50] / gw2 [50]
+    
+    scheduling tests via LoadScheduling
+    ..f...............f..................f............
+    ================================= FAILURES =================================
+    _______________________________ test_func[0] _______________________________
+    [gw0] linux2 -- Python 2.7.3 /home/hpk/venv/1/bin/python
+    /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15
+    ______________________________ test_func[19] _______________________________
+    [gw2] linux2 -- Python 2.7.3 /home/hpk/venv/1/bin/python
+    /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15
+    ______________________________ test_func[38] _______________________________
+    [gw2] linux2 -- Python 2.7.3 /home/hpk/venv/1/bin/python
+    /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15
+    =================== 3 failed, 47 passed in 2.03 seconds ====================

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/setup.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/setup.py b/tools/pytest-xdist/setup.py
new file mode 100644
index 0000000..26bb42e
--- /dev/null
+++ b/tools/pytest-xdist/setup.py
@@ -0,0 +1,30 @@
+from setuptools import setup
+
+setup(
+    name="pytest-xdist",
+    version='1.11',
+    description='py.test xdist plugin for distributed testing and loop-on-failing modes',
+    long_description=open('README.txt').read(),
+    license='MIT',
+    author='holger krekel and contributors',
+    author_email='pytest-dev@python.org,holger@merlinux.eu',
+    url='http://bitbucket.org/hpk42/pytest-xdist',
+    platforms=['linux', 'osx', 'win32'],
+    packages = ['xdist'],
+    entry_points = {'pytest11': ['xdist = xdist.plugin'],},
+    zip_safe=False,
+    install_requires = ['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
+    classifiers=[
+    'Development Status :: 5 - Production/Stable',
+    'Intended Audience :: Developers',
+    'License :: OSI Approved :: MIT License',
+    'Operating System :: POSIX',
+    'Operating System :: Microsoft :: Windows',
+    'Operating System :: MacOS :: MacOS X',
+    'Topic :: Software Development :: Testing',
+    'Topic :: Software Development :: Quality Assurance',
+    'Topic :: Utilities',
+    'Programming Language :: Python',
+    'Programming Language :: Python :: 3',
+    ],
+)

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/testing/acceptance_test.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/testing/acceptance_test.py b/tools/pytest-xdist/testing/acceptance_test.py
new file mode 100644
index 0000000..ea6ccce
--- /dev/null
+++ b/tools/pytest-xdist/testing/acceptance_test.py
@@ -0,0 +1,486 @@
+import py
+
+class TestDistribution:
+    def test_n1_pass(self, testdir):
+        p1 = testdir.makepyfile("""
+            def test_ok():
+                pass
+        """)
+        result = testdir.runpytest(p1, "-n1")
+        assert result.ret == 0
+        result.stdout.fnmatch_lines([
+            "*1 passed*",
+        ])
+
+    def test_n1_fail(self, testdir):
+        p1 = testdir.makepyfile("""
+            def test_fail():
+                assert 0
+        """)
+        result = testdir.runpytest(p1, "-n1")
+        assert result.ret == 1
+        result.stdout.fnmatch_lines([
+            "*1 failed*",
+        ])
+
+    def test_n1_import_error(self, testdir):
+        p1 = testdir.makepyfile("""
+            import __import_of_missing_module
+            def test_import():
+                pass
+        """)
+        result = testdir.runpytest(p1, "-n1")
+        assert result.ret == 1
+        result.stdout.fnmatch_lines([
+            "E   ImportError: *__import_of_missing_module*",
+        ])
+
+    def test_n2_import_error(self, testdir):
+        """Check that we don't report the same import error multiple times
+        in distributed mode."""
+        p1 = testdir.makepyfile("""
+            import __import_of_missing_module
+            def test_import():
+                pass
+        """)
+        result1 = testdir.runpytest(p1, "-n2")
+        result2 = testdir.runpytest(p1, "-n1")
+        assert len(result1.stdout.lines) == len(result2.stdout.lines)
+
+    def test_n1_skip(self, testdir):
+        p1 = testdir.makepyfile("""
+            def test_skip():
+                import py
+                py.test.skip("myreason")
+        """)
+        result = testdir.runpytest(p1, "-n1")
+        assert result.ret == 0
+        result.stdout.fnmatch_lines([
+            "*1 skipped*",
+        ])
+
+    def test_manytests_to_one_import_error(self, testdir):
+        p1 = testdir.makepyfile("""
+            import __import_of_missing_module
+            def test_import():
+                pass
+        """)
+        result = testdir.runpytest(p1, '--tx=popen', '--tx=popen')
+        assert result.ret == 1
+        result.stdout.fnmatch_lines([
+            "E   ImportError: *__import_of_missing_module*",
+        ])
+
+    def test_manytests_to_one_popen(self, testdir):
+        p1 = testdir.makepyfile("""
+                import py
+                def test_fail0():
+                    assert 0
+                def test_fail1():
+                    raise ValueError()
+                def test_ok():
+                    pass
+                def test_skip():
+                    py.test.skip("hello")
+            """,
+        )
+        result = testdir.runpytest(p1, "-v", '-d', '--tx=popen', '--tx=popen')
+        result.stdout.fnmatch_lines([
+            "*1*Python*",
+            "*2 failed, 1 passed, 1 skipped*",
+        ])
+        assert result.ret == 1
+
+    def test_n1_fail_minus_x(self, testdir):
+        p1 = testdir.makepyfile("""
+            def test_fail1():
+                assert 0
+            def test_fail2():
+                assert 0
+        """)
+        result = testdir.runpytest(p1, "-x", "-v", "-n1")
+        assert result.ret == 2
+        result.stdout.fnmatch_lines([
+            "*Interrupted: stopping*1*",
+            "*1 failed*",
+        ])
+
+    def test_basetemp_in_subprocesses(self, testdir):
+        p1 = testdir.makepyfile("""
+            def test_send(tmpdir):
+                import py
+                assert tmpdir.relto(py.path.local(%r)), tmpdir
+        """ % str(testdir.tmpdir))
+        result = testdir.runpytest(p1, "-n1")
+        assert result.ret == 0
+        result.stdout.fnmatch_lines([
+            "*1 passed*",
+        ])
+
+    def test_dist_ini_specified(self, testdir):
+        p1 = testdir.makepyfile("""
+                import py
+                def test_fail0():
+                    assert 0
+                def test_fail1():
+                    raise ValueError()
+                def test_ok():
+                    pass
+                def test_skip():
+                    py.test.skip("hello")
+            """,
+        )
+        testdir.makeini("""
+            [pytest]
+            addopts = --tx=3*popen
+        """)
+        result = testdir.runpytest(p1, '-d', "-v")
+        result.stdout.fnmatch_lines([
+            "*2*Python*",
+            "*2 failed, 1 passed, 1 skipped*",
+        ])
+        assert result.ret == 1
+
+    @py.test.mark.xfail("sys.platform.startswith('java')", run=False)
+    def test_dist_tests_with_crash(self, testdir):
+        if not hasattr(py.std.os, 'kill'):
+            py.test.skip("no os.kill")
+
+        p1 = testdir.makepyfile("""
+                import py
+                def test_fail0():
+                    assert 0
+                def test_fail1():
+                    raise ValueError()
+                def test_ok():
+                    pass
+                def test_skip():
+                    py.test.skip("hello")
+                def test_crash():
+                    import time
+                    import os
+                    time.sleep(0.5)
+                    os.kill(os.getpid(), 15)
+            """
+        )
+        result = testdir.runpytest(p1, "-v", '-d', '-n1')
+        result.stdout.fnmatch_lines([
+            "*Python*",
+            "*PASS**test_ok*",
+            "*node*down*",
+            "*3 failed, 1 passed, 1 skipped*"
+        ])
+        assert result.ret == 1
+
+    def test_distribution_rsyncdirs_example(self, testdir):
+        source = testdir.mkdir("source")
+        dest = testdir.mkdir("dest")
+        subdir = source.mkdir("example_pkg")
+        subdir.ensure("__init__.py")
+        p = subdir.join("test_one.py")
+        p.write("def test_5():\n  assert not __file__.startswith(%r)" % str(p))
+        result = testdir.runpytest("-v", "-d",
+            "--rsyncdir=%(subdir)s" % locals(),
+            "--tx=popen//chdir=%(dest)s" % locals(), p)
+        assert result.ret == 0
+        result.stdout.fnmatch_lines([
+            "*0* *cwd*",
+            #"RSyncStart: [G1]",
+            #"RSyncFinished: [G1]",
+            "*1 passed*"
+        ])
+        assert dest.join(subdir.basename).check(dir=1)
+
+    def test_data_exchange(self, testdir):
+        testdir.makeconftest("""
+            # This hook only called on master.
+            def pytest_configure_node(node):
+                node.slaveinput['a'] = 42
+                node.slaveinput['b'] = 7
+
+            def pytest_configure(config):
+                # this attribute is only set on slaves
+                if hasattr(config, 'slaveinput'):
+                    a = config.slaveinput['a']
+                    b = config.slaveinput['b']
+                    r = a + b
+                    config.slaveoutput['r'] = r
+
+            # This hook only called on master.
+            def pytest_testnodedown(node, error):
+                node.config.calc_result = node.slaveoutput['r']
+
+            def pytest_terminal_summary(terminalreporter):
+                if not hasattr(terminalreporter.config, 'slaveinput'):
+                    calc_result = terminalreporter.config.calc_result
+                    terminalreporter._tw.sep('-',
+                        'calculated result is %s' % calc_result)
+        """)
+        p1 = testdir.makepyfile("def test_func(): pass")
+        result = testdir.runpytest("-v", p1, '-d', '--tx=popen')
+        result.stdout.fnmatch_lines([
+            "*0*Python*",
+            "*calculated result is 49*",
+            "*1 passed*"
+        ])
+        assert result.ret == 0
+
+    def test_keyboardinterrupt_hooks_issue79(self, testdir):
+        testdir.makepyfile(__init__="", test_one="""
+            def test_hello():
+                raise KeyboardInterrupt()
+        """)
+        testdir.makeconftest("""
+            def pytest_sessionfinish(session):
+                # on the slave
+                if hasattr(session.config, 'slaveoutput'):
+                    session.config.slaveoutput['s2'] = 42
+            # on the master
+            def pytest_testnodedown(node, error):
+                assert node.slaveoutput['s2'] == 42
+                print ("s2call-finished")
+        """)
+        args = ["-n1", "--debug"]
+        result = testdir.runpytest(*args)
+        s = result.stdout.str()
+        assert result.ret == 2
+        assert 's2call' in s
+        assert "Interrupted" in s
+
+    def test_keyboard_interrupt_dist(self, testdir):
+        # xxx could be refined to check for return code
+        testdir.makepyfile("""
+            def test_sleep():
+                import time
+                time.sleep(10)
+        """)
+        child = testdir.spawn_pytest("-n1")
+        py.std.time.sleep(0.1)
+        child.expect(".*test session starts.*")
+        child.kill(2) # keyboard interrupt
+        child.expect(".*KeyboardInterrupt.*")
+        #child.expect(".*seconds.*")
+        child.close()
+        #assert ret == 2
+
+class TestDistEach:
+    def test_simple(self, testdir):
+        testdir.makepyfile("""
+            def test_hello():
+                pass
+        """)
+        result = testdir.runpytest("--debug", "--dist=each", "--tx=2*popen")
+        assert not result.ret
+        result.stdout.fnmatch_lines(["*2 pass*"])
+
+    @py.test.mark.xfail(run=False,
+        reason="other python versions might not have py.test installed")
+    def test_simple_diffoutput(self, testdir):
+        interpreters = []
+        for name in ("python2.5", "python2.6"):
+            interp = py.path.local.sysfind(name)
+            if interp is None:
+                py.test.skip("%s not found" % name)
+            interpreters.append(interp)
+
+        testdir.makepyfile(__init__="", test_one="""
+            import sys
+            def test_hello():
+                print("%s...%s" % sys.version_info[:2])
+                assert 0
+        """)
+        args = ["--dist=each", "-v"]
+        args += ["--tx", "popen//python=%s" % interpreters[0]]
+        args += ["--tx", "popen//python=%s" % interpreters[1]]
+        result = testdir.runpytest(*args)
+        s = result.stdout.str()
+        assert "2...5" in s
+        assert "2...6" in s
+
+class TestTerminalReporting:
+    def test_pass_skip_fail(self, testdir):
+        testdir.makepyfile("""
+            import py
+            def test_ok():
+                pass
+            def test_skip():
+                py.test.skip("xx")
+            def test_func():
+                assert 0
+        """)
+        result = testdir.runpytest("-n1", "-v")
+        result.stdout.fnmatch_lines_random([
+            "*PASS*test_pass_skip_fail.py*test_ok*",
+            "*SKIP*test_pass_skip_fail.py*test_skip*",
+            "*FAIL*test_pass_skip_fail.py*test_func*",
+        ])
+        result.stdout.fnmatch_lines([
+            "*def test_func():",
+            ">       assert 0",
+            "E       assert 0",
+        ])
+
+    def test_fail_platinfo(self, testdir):
+        testdir.makepyfile("""
+            def test_func():
+                assert 0
+        """)
+        result = testdir.runpytest("-n1", "-v")
+        result.stdout.fnmatch_lines([
+            "*FAIL*test_fail_platinfo.py*test_func*",
+            "*0*Python*",
+            "*def test_func():",
+            ">       assert 0",
+            "E       assert 0",
+        ])
+
+def test_teardownfails_one_function(testdir):
+    p = testdir.makepyfile("""
+        def test_func():
+            pass
+        def teardown_function(function):
+            assert 0
+    """)
+    result = testdir.runpytest(p, '-n1', '--tx=popen')
+    result.stdout.fnmatch_lines([
+        "*def teardown_function(function):*",
+        "*1 passed*1 error*"
+    ])
+
+@py.test.mark.xfail
+def test_terminate_on_hangingnode(testdir):
+    p = testdir.makeconftest("""
+        def pytest_sessionfinishes(session):
+            if session.nodeid == "my": # running on slave
+                import time
+                time.sleep(3)
+    """)
+    result = testdir.runpytest(p, '--dist=each', '--tx=popen//id=my')
+    assert result.duration < 2.0
+    result.stdout.fnmatch_lines([
+        "*killed*my*",
+    ])
+
+
+
+def test_session_hooks(testdir):
+    testdir.makeconftest("""
+        import sys
+        def pytest_sessionstart(session):
+            sys.pytestsessionhooks = session
+        def pytest_sessionfinish(session):
+            if hasattr(session.config, 'slaveinput'):
+                name = "slave"
+            else:
+                name = "master"
+            f = open(name, "w")
+            f.write("xy")
+            f.close()
+            # let's fail on the slave
+            if name == "slave":
+                raise ValueError(42)
+    """)
+    p = testdir.makepyfile("""
+        import sys
+        def test_hello():
+            assert hasattr(sys, 'pytestsessionhooks')
+    """)
+    result = testdir.runpytest(p, "--dist=each", "--tx=popen")
+    result.stdout.fnmatch_lines([
+        "*ValueError*",
+        "*1 passed*",
+    ])
+    assert not result.ret
+    d = result.parseoutcomes()
+    assert d['passed'] == 1
+    assert testdir.tmpdir.join("slave").check()
+    assert testdir.tmpdir.join("master").check()
+
+def test_funcarg_teardown_failure(testdir):
+    p = testdir.makepyfile("""
+        def pytest_funcarg__myarg(request):
+            def teardown(val):
+                raise ValueError(val)
+            return request.cached_setup(setup=lambda: 42, teardown=teardown,
+                scope="module")
+        def test_hello(myarg):
+            pass
+    """)
+    result = testdir.runpytest("--debug", p) # , "-n1")
+    result.stdout.fnmatch_lines([
+        "*ValueError*42*",
+        "*1 passed*1 error*",
+    ])
+    assert result.ret
+
+def test_crashing_item(testdir):
+    p = testdir.makepyfile("""
+        import py
+        import os
+        def test_crash():
+            py.process.kill(os.getpid())
+        def test_noncrash():
+            pass
+    """)
+    result = testdir.runpytest("-n2", p)
+    result.stdout.fnmatch_lines([
+        "*crashed*test_crash*",
+        "*1 failed*1 passed*"
+    ])
+
+
+
+def test_skipping(testdir):
+    p = testdir.makepyfile("""
+        import pytest
+        def test_crash():
+            pytest.skip("hello")
+    """)
+    result = testdir.runpytest("-n1", '-rs', p)
+    assert result.ret == 0
+    result.stdout.fnmatch_lines([
+        "*hello*",
+        "*1 skipped*"
+    ])
+
+def test_issue34_pluginloading_in_subprocess(testdir):
+    testdir.tmpdir.join("plugin123.py").write(py.code.Source("""
+        def pytest_namespace():
+            return {'sample_variable': 'testing'}
+    """))
+    testdir.makepyfile("""
+        import pytest
+        def test_hello():
+            assert pytest.sample_variable == "testing"
+    """)
+    result = testdir.runpytest("-n1", "-p", "plugin123")
+    assert result.ret == 0
+    result.stdout.fnmatch_lines([
+        "*1 passed*",
+    ])
+
+
+def test_fixture_scope_caching_issue503(testdir):
+    p1 = testdir.makepyfile("""
+            import pytest
+
+            @pytest.fixture(scope='session')
+            def fix():
+                assert fix.counter == 0, 'session fixture was invoked multiple times'
+                fix.counter += 1
+            fix.counter = 0
+
+            def test_a(fix):
+                pass
+
+            def test_b(fix):
+                pass
+    """)
+    result = testdir.runpytest(p1, '-v', '-n1')
+    assert result.ret == 0
+    result.stdout.fnmatch_lines([
+    "*2 passed*",
+    ])
+
+
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/testing/conftest.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/testing/conftest.py b/tools/pytest-xdist/testing/conftest.py
new file mode 100644
index 0000000..d805f8c
--- /dev/null
+++ b/tools/pytest-xdist/testing/conftest.py
@@ -0,0 +1,36 @@
+import py
+import execnet
+
+pytest_plugins = "pytester"
+
+#rsyncdirs = ['.', '../xdist', py.path.local(execnet.__file__).dirpath()]
+
+def pytest_addoption(parser):
+    parser.addoption('--gx',
+       action="append", dest="gspecs",
+       help=("add a global test environment, XSpec-syntax. "))
+
+def pytest_funcarg__specssh(request):
+    return getspecssh(request.config)
+
+# configuration information for tests
+def getgspecs(config):
+    return [execnet.XSpec(spec)
+                for spec in config.getvalueorskip("gspecs")]
+
+def getspecssh(config):
+    xspecs = getgspecs(config)
+    for spec in xspecs:
+        if spec.ssh:
+            if not py.path.local.sysfind("ssh"):
+                py.test.skip("command not found: ssh")
+            return str(spec)
+    py.test.skip("need '--gx ssh=...'")
+
+def getsocketspec(config):
+    xspecs = getgspecs(config)
+    for spec in xspecs:
+        if spec.socket:
+            return spec
+    py.test.skip("need '--gx socket=...'")
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/testing/test_boxed.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/testing/test_boxed.py b/tools/pytest-xdist/testing/test_boxed.py
new file mode 100644
index 0000000..bee9367
--- /dev/null
+++ b/tools/pytest-xdist/testing/test_boxed.py
@@ -0,0 +1,56 @@
+import pytest
+import os
+
+needsfork = pytest.mark.skipif(not hasattr(os, "fork"),
+                               reason="os.fork required")
+
+@needsfork
+def test_functional_boxed(testdir):
+    p1 = testdir.makepyfile("""
+        import os
+        def test_function():
+            os.kill(os.getpid(), 15)
+    """)
+    result = testdir.runpytest(p1, "--boxed")
+    result.stdout.fnmatch_lines([
+        "*CRASHED*",
+        "*1 failed*"
+    ])
+
+@needsfork
+@pytest.mark.parametrize("capmode", [
+    "no",
+    pytest.mark.xfail("sys", reason="capture cleanup needed"),
+    pytest.mark.xfail("fd", reason="capture cleanup needed")])
+def test_functional_boxed_capturing(testdir, capmode):
+    p1 = testdir.makepyfile("""
+        import os
+        import sys
+        def test_function():
+            sys.stdout.write("hello\\n")
+            sys.stderr.write("world\\n")
+            os.kill(os.getpid(), 15)
+    """)
+    result = testdir.runpytest(p1, "--boxed", "--capture=%s" % capmode)
+    result.stdout.fnmatch_lines("""
+        *CRASHED*
+        *stdout*
+        hello
+        *stderr*
+        world
+        *1 failed*
+    """)
+
+class TestOptionEffects:
+    def test_boxed_option_default(self, testdir):
+        tmpdir = testdir.tmpdir.ensure("subdir", dir=1)
+        config = testdir.parseconfig()
+        assert not config.option.boxed
+        pytest.importorskip("execnet")
+        config = testdir.parseconfig('-d', tmpdir)
+        assert not config.option.boxed
+
+    def test_is_not_boxed_by_default(self, testdir):
+        config = testdir.parseconfig(testdir.tmpdir)
+        assert not config.option.boxed
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/testing/test_dsession.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/testing/test_dsession.py b/tools/pytest-xdist/testing/test_dsession.py
new file mode 100644
index 0000000..90ba45b
--- /dev/null
+++ b/tools/pytest-xdist/testing/test_dsession.py
@@ -0,0 +1,240 @@
+from xdist.dsession import (
+    DSession,
+    LoadScheduling,
+    EachScheduling,
+    report_collection_diff,
+)
+import py
+import pytest
+import execnet
+
+XSpec = execnet.XSpec
+
+def run(item, node, excinfo=None):
+    runner = item.config.pluginmanager.getplugin("runner")
+    rep = runner.ItemTestReport(item=item,
+        excinfo=excinfo, when="call")
+    rep.node = node
+    return rep
+
+class MockGateway:
+    _count = 0
+    def __init__(self):
+        self.id = str(self._count)
+        self._count += 1
+
+class MockNode:
+    def __init__(self):
+        self.sent = []
+        self.gateway = MockGateway()
+
+    def send_runtest_some(self, indices):
+        self.sent.extend(indices)
+
+    def send_runtest_all(self):
+        self.sent.append("ALL")
+
+    def shutdown(self):
+        self._shutdown=True
+
+def dumpqueue(queue):
+    while queue.qsize():
+        print(queue.get())
+
+class TestEachScheduling:
+    def test_schedule_load_simple(self):
+        node1 = MockNode()
+        node2 = MockNode()
+        sched = EachScheduling(2)
+        sched.addnode(node1)
+        sched.addnode(node2)
+        collection = ["a.py::test_1", ]
+        assert not sched.collection_is_completed
+        sched.addnode_collection(node1, collection)
+        assert not sched.collection_is_completed
+        sched.addnode_collection(node2, collection)
+        assert sched.collection_is_completed
+        assert sched.node2collection[node1] == collection
+        assert sched.node2collection[node2] == collection
+        sched.init_distribute()
+        assert sched.tests_finished()
+        assert node1.sent == ['ALL']
+        assert node2.sent == ['ALL']
+        sched.remove_item(node1, 0)
+        assert sched.tests_finished()
+        sched.remove_item(node2, 0)
+        assert sched.tests_finished()
+
+    def test_schedule_remove_node(self):
+        node1 = MockNode()
+        sched = EachScheduling(1)
+        sched.addnode(node1)
+        collection = ["a.py::test_1", ]
+        assert not sched.collection_is_completed
+        sched.addnode_collection(node1, collection)
+        assert sched.collection_is_completed
+        assert sched.node2collection[node1] == collection
+        sched.init_distribute()
+        assert sched.tests_finished()
+        crashitem = sched.remove_node(node1)
+        assert crashitem
+        assert sched.tests_finished()
+        assert not sched.hasnodes()
+
+class TestLoadScheduling:
+    def test_schedule_load_simple(self):
+        node1 = MockNode()
+        node2 = MockNode()
+        sched = LoadScheduling(2)
+        sched.addnode(node1)
+        sched.addnode(node2)
+        collection = ["a.py::test_1", "a.py::test_2"]
+        assert not sched.collection_is_completed
+        sched.addnode_collection(node1, collection)
+        assert not sched.collection_is_completed
+        sched.addnode_collection(node2, collection)
+        assert sched.collection_is_completed
+        assert sched.node2collection[node1] == collection
+        assert sched.node2collection[node2] == collection
+        sched.init_distribute()
+        assert not sched.pending
+        assert not sched.tests_finished()
+        assert len(node1.sent) == 2
+        assert len(node2.sent) == 0
+        assert node1.sent == [0, 1]
+        sched.remove_item(node1, node1.sent[0])
+        assert sched.tests_finished()
+        sched.remove_item(node1, node1.sent[1])
+        assert sched.tests_finished()
+
+    def test_init_distribute_chunksize(self):
+        sched = LoadScheduling(2)
+        node1 = MockNode()
+        node2 = MockNode()
+        sched.addnode(node1)
+        sched.addnode(node2)
+        col = ["xyz"] * (6)
+        sched.addnode_collection(node1, col)
+        sched.addnode_collection(node2, col)
+        sched.init_distribute()
+        #assert not sched.tests_finished()
+        sent1 = node1.sent
+        sent2 = node2.sent
+        assert sent1 == [0, 1]
+        assert sent2 == [2, 3]
+        assert sched.pending == [4, 5]
+        assert sched.node2pending[node1] == sent1
+        assert sched.node2pending[node2] == sent2
+        assert len(sched.pending) == 2
+        sched.remove_item(node1, 0)
+        assert node1.sent == [0, 1, 4]
+        assert sched.pending == [5]
+        assert node2.sent == [2, 3]
+        sched.remove_item(node1, 1)
+        assert node1.sent == [0, 1, 4, 5]
+        assert not sched.pending
+
+    def test_add_remove_node(self):
+        node = MockNode()
+        sched = LoadScheduling(1)
+        sched.addnode(node)
+        collection = ["test_file.py::test_func"]
+        sched.addnode_collection(node, collection)
+        assert sched.collection_is_completed
+        sched.init_distribute()
+        assert not sched.pending
+        crashitem = sched.remove_node(node)
+        assert crashitem == collection[0]
+
+    def test_schedule_different_tests_collected(self):
+        """
+        Test that LoadScheduling is logging different tests were
+        collected by slaves when that happens.
+        """
+        node1 = MockNode()
+        node2 = MockNode()
+        sched = LoadScheduling(2)
+        logged_messages = []
+        py.log.setconsumer('loadsched', logged_messages.append)
+        sched.addnode(node1)
+        sched.addnode(node2)
+        sched.addnode_collection(node1, ["a.py::test_1"])
+        sched.addnode_collection(node2, ["a.py::test_2"])
+        sched.init_distribute()
+        logged_content = ''.join(x.content() for x in logged_messages)
+        assert 'Different tests were collected between' in logged_content
+        assert 'Different tests collected, aborting run' in logged_content
+
+
+class TestDistReporter:
+
+    @py.test.mark.xfail
+    def test_rsync_printing(self, testdir, linecomp):
+        config = testdir.parseconfig()
+        from _pytest.pytest_terminal import TerminalReporter
+        rep = TerminalReporter(config, file=linecomp.stringio)
+        config.pluginmanager.register(rep, "terminalreporter")
+        dsession = DSession(config)
+        class gw1:
+            id = "X1"
+            spec = execnet.XSpec("popen")
+        class gw2:
+            id = "X2"
+            spec = execnet.XSpec("popen")
+        #class rinfo:
+        #    version_info = (2, 5, 1, 'final', 0)
+        #    executable = "hello"
+        #    platform = "xyz"
+        #    cwd = "qwe"
+
+        #dsession.pytest_xdist_newgateway(gw1, rinfo)
+        #linecomp.assert_contains_lines([
+        #    "*X1*popen*xyz*2.5*"
+        #])
+        dsession.pytest_xdist_rsyncstart(source="hello", gateways=[gw1, gw2])
+        linecomp.assert_contains_lines([
+            "[X1,X2] rsyncing: hello",
+        ])
+
+
+def test_report_collection_diff_equal():
+    """Test reporting of equal collections."""
+    from_collection = to_collection = ['aaa', 'bbb', 'ccc']
+    assert report_collection_diff(from_collection, to_collection, 1, 2) is None
+
+
+def test_report_collection_diff_different():
+    """Test reporting of different collections."""
+    from_collection = ['aaa', 'bbb', 'ccc', 'YYY']
+    to_collection = ['aZa', 'bbb', 'XXX', 'ccc']
+    error_message = (
+        'Different tests were collected between 1 and 2. The difference is:\n'
+        '--- 1\n'
+        '\n'
+        '+++ 2\n'
+        '\n'
+        '@@ -1,4 +1,4 @@\n'
+        '\n'
+        '-aaa\n'
+        '+aZa\n'
+        ' bbb\n'
+        '+XXX\n'
+        ' ccc\n'
+        '-YYY'
+    )
+
+    msg = report_collection_diff(from_collection, to_collection, 1, 2)
+    assert msg == error_message
+
+@pytest.mark.xfail(reason="duplicate test ids not supported yet")
+def test_pytest_issue419(testdir):
+    testdir.makepyfile("""
+        import pytest
+
+        @pytest.mark.parametrize('birth_year', [1988, 1988, ])
+        def test_2011_table(birth_year):
+            pass
+    """)
+    reprec = testdir.inline_run("-n1")
+    reprec.assertoutcome(passed=2)
+    assert 0

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/testing/test_looponfail.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/testing/test_looponfail.py b/tools/pytest-xdist/testing/test_looponfail.py
new file mode 100644
index 0000000..4aadb32
--- /dev/null
+++ b/tools/pytest-xdist/testing/test_looponfail.py
@@ -0,0 +1,280 @@
+import py
+from xdist.looponfail import RemoteControl
+from xdist.looponfail import StatRecorder
+
+class TestStatRecorder:
+    def test_filechange(self, tmpdir):
+        tmp = tmpdir
+        hello = tmp.ensure("hello.py")
+        sd = StatRecorder([tmp])
+        changed = sd.check()
+        assert not changed
+
+        hello.write("world")
+        changed = sd.check()
+        assert changed
+
+        (hello + "c").write("hello")
+        changed = sd.check()
+        assert not changed
+
+        p = tmp.ensure("new.py")
+        changed = sd.check()
+        assert changed
+
+        p.remove()
+        changed = sd.check()
+        assert changed
+
+        tmp.join("a", "b", "c.py").ensure()
+        changed = sd.check()
+        assert changed
+
+        tmp.join("a", "c.txt").ensure()
+        changed = sd.check()
+        assert changed
+        changed = sd.check()
+        assert not changed
+
+        tmp.join("a").remove()
+        changed = sd.check()
+        assert changed
+
+    def test_dirchange(self, tmpdir):
+        tmp = tmpdir
+        tmp.ensure("dir", "hello.py")
+        sd = StatRecorder([tmp])
+        assert not sd.fil(tmp.join("dir"))
+
+    def test_filechange_deletion_race(self, tmpdir, monkeypatch):
+        tmp = tmpdir
+        sd = StatRecorder([tmp])
+        changed = sd.check()
+        assert not changed
+
+        p = tmp.ensure("new.py")
+        changed = sd.check()
+        assert changed
+
+        p.remove()
+        # make check()'s visit() call return our just removed
+        # path as if we were in a race condition
+        monkeypatch.setattr(tmp, 'visit', lambda *args: [p])
+
+        changed = sd.check()
+        assert changed
+
+    def test_pycremoval(self, tmpdir):
+        tmp = tmpdir
+        hello = tmp.ensure("hello.py")
+        sd = StatRecorder([tmp])
+        changed = sd.check()
+        assert not changed
+
+        pycfile = hello + "c"
+        pycfile.ensure()
+        hello.write("world")
+        changed = sd.check()
+        assert changed
+        assert not pycfile.check()
+
+    def test_waitonchange(self, tmpdir, monkeypatch):
+        tmp = tmpdir
+        sd = StatRecorder([tmp])
+
+        l = [True, False]
+        monkeypatch.setattr(StatRecorder, 'check', lambda self: l.pop())
+        sd.waitonchange(checkinterval=0.2)
+        assert not l
+
+class TestRemoteControl:
+    def test_nofailures(self, testdir):
+        item = testdir.getitem("def test_func(): pass\n")
+        control = RemoteControl(item.config)
+        control.setup()
+        topdir, failures = control.runsession()[:2]
+        assert not failures
+
+    def test_failures_somewhere(self, testdir):
+        item = testdir.getitem("def test_func():\n assert 0\n")
+        control = RemoteControl(item.config)
+        control.setup()
+        failures = control.runsession()
+        assert failures
+        control.setup()
+        item.fspath.write("def test_func():\n assert 1\n")
+        removepyc(item.fspath)
+        topdir, failures = control.runsession()[:2]
+        assert not failures
+
+    def test_failure_change(self, testdir):
+        modcol = testdir.getitem("""
+            def test_func():
+                assert 0
+        """)
+        control = RemoteControl(modcol.config)
+        control.loop_once()
+        assert control.failures
+        modcol.fspath.write(py.code.Source("""
+            def test_func():
+                assert 1
+            def test_new():
+                assert 0
+        """))
+        removepyc(modcol.fspath)
+        control.loop_once()
+        assert not control.failures
+        control.loop_once()
+        assert control.failures
+        assert str(control.failures).find("test_new") != -1
+
+    def test_failure_subdir_no_init(self, testdir):
+        modcol = testdir.getitem("""
+            def test_func():
+                assert 0
+        """)
+        parent = modcol.fspath.dirpath().dirpath()
+        parent.chdir()
+        modcol.config.args = [py.path.local(x).relto(parent)
+                                for x in modcol.config.args]
+        control = RemoteControl(modcol.config)
+        control.loop_once()
+        assert control.failures
+        control.loop_once()
+        assert control.failures
+
+class TestLooponFailing:
+    def test_looponfail_from_fail_to_ok(self, testdir):
+        modcol = testdir.getmodulecol("""
+            def test_one():
+                x = 0
+                assert x == 1
+            def test_two():
+                assert 1
+        """)
+        remotecontrol = RemoteControl(modcol.config)
+        remotecontrol.loop_once()
+        assert len(remotecontrol.failures) == 1
+
+        modcol.fspath.write(py.code.Source("""
+            def test_one():
+                assert 1
+            def test_two():
+                assert 1
+        """))
+        removepyc(modcol.fspath)
+        remotecontrol.loop_once()
+        assert not remotecontrol.failures
+
+    def test_looponfail_from_one_to_two_tests(self, testdir):
+        modcol = testdir.getmodulecol("""
+            def test_one():
+                assert 0
+        """)
+        remotecontrol = RemoteControl(modcol.config)
+        remotecontrol.loop_once()
+        assert len(remotecontrol.failures) == 1
+        assert 'test_one' in remotecontrol.failures[0]
+
+        modcol.fspath.write(py.code.Source("""
+            def test_one():
+                assert 1 # passes now
+            def test_two():
+                assert 0 # new and fails
+        """))
+        removepyc(modcol.fspath)
+        remotecontrol.loop_once()
+        assert len(remotecontrol.failures) == 0
+        remotecontrol.loop_once()
+        assert len(remotecontrol.failures) == 1
+        assert 'test_one' not in remotecontrol.failures[0]
+        assert 'test_two' in remotecontrol.failures[0]
+
+    def test_looponfail_removed_test(self, testdir):
+        modcol = testdir.getmodulecol("""
+            def test_one():
+                assert 0
+            def test_two():
+                assert 0
+        """)
+        remotecontrol = RemoteControl(modcol.config)
+        remotecontrol.loop_once()
+        assert len(remotecontrol.failures) == 2
+
+        modcol.fspath.write(py.code.Source("""
+            def test_xxx(): # renamed test
+                assert 0
+            def test_two():
+                assert 1 # pass now
+        """))
+        removepyc(modcol.fspath)
+        remotecontrol.loop_once()
+        assert len(remotecontrol.failures) == 0
+
+        remotecontrol.loop_once()
+        assert len(remotecontrol.failures) == 1
+
+    def test_looponfail_multiple_errors(self, testdir, monkeypatch):
+        modcol = testdir.getmodulecol("""
+            def test_one():
+                assert 0
+        """)
+        remotecontrol = RemoteControl(modcol.config)
+        orig_runsession = remotecontrol.runsession
+
+        def runsession_dups():
+            # twisted.trial test cases may report multiple errors.
+            failures, reports, collection_failed = orig_runsession()
+            print (failures)
+            return failures * 2, reports, collection_failed
+
+        monkeypatch.setattr(remotecontrol, 'runsession', runsession_dups)
+        remotecontrol.loop_once()
+        assert len(remotecontrol.failures) == 1
+
+
+class TestFunctional:
+    def test_fail_to_ok(self, testdir):
+        p = testdir.makepyfile("""
+            def test_one():
+                x = 0
+                assert x == 1
+        """)
+        #p = testdir.mkdir("sub").join(p1.basename)
+        #p1.move(p)
+        child = testdir.spawn_pytest("-f %s --traceconfig" % p)
+        child.expect("def test_one")
+        child.expect("x == 1")
+        child.expect("1 failed")
+        child.expect("### LOOPONFAILING ####")
+        child.expect("waiting for changes")
+        p.write(py.code.Source("""
+            def test_one():
+                x = 1
+                assert x == 1
+        """))
+        child.expect(".*1 passed.*")
+        child.kill(15)
+
+    def test_xfail_passes(self, testdir):
+        p = testdir.makepyfile("""
+            import py
+            @py.test.mark.xfail
+            def test_one():
+                pass
+        """)
+        child = testdir.spawn_pytest("-f %s" % p)
+        child.expect("1 xpass")
+        child.expect("### LOOPONFAILING ####")
+        child.expect("waiting for changes")
+        child.kill(15)
+
+def removepyc(path):
+    # XXX damn those pyc files
+    pyc = path + "c"
+    if pyc.check():
+        pyc.remove()
+    c = path.dirpath("__pycache__")
+    if c.check():
+        c.remove()
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/testing/test_plugin.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/testing/test_plugin.py b/tools/pytest-xdist/testing/test_plugin.py
new file mode 100644
index 0000000..91eda26
--- /dev/null
+++ b/tools/pytest-xdist/testing/test_plugin.py
@@ -0,0 +1,70 @@
+import py
+import execnet
+from xdist.slavemanage import NodeManager
+
+def test_dist_incompatibility_messages(testdir):
+    result = testdir.runpytest("--pdb", "--looponfail")
+    assert result.ret != 0
+    result = testdir.runpytest("--pdb", "-n", "3")
+    assert result.ret != 0
+    assert "incompatible" in result.stderr.str()
+    result = testdir.runpytest("--pdb", "-d", "--tx", "popen")
+    assert result.ret != 0
+    assert "incompatible" in result.stderr.str()
+
+def test_dist_options(testdir):
+    from xdist.plugin import check_options
+    config = testdir.parseconfigure("-n 2")
+    check_options(config)
+    assert config.option.dist == "load"
+    assert config.option.tx == ['popen'] * 2
+    config = testdir.parseconfigure("-d")
+    check_options(config)
+    assert config.option.dist == "load"
+
+class TestDistOptions:
+    def test_getxspecs(self, testdir):
+        config = testdir.parseconfigure("--tx=popen", "--tx", "ssh=xyz")
+        nodemanager = NodeManager(config)
+        xspecs = nodemanager._getxspecs()
+        assert len(xspecs) == 2
+        print(xspecs)
+        assert xspecs[0].popen
+        assert xspecs[1].ssh == "xyz"
+
+    def test_xspecs_multiplied(self, testdir):
+        config = testdir.parseconfigure("--tx=3*popen",)
+        xspecs = NodeManager(config)._getxspecs()
+        assert len(xspecs) == 3
+        assert xspecs[1].popen
+
+    def test_getrsyncdirs(self, testdir):
+        config = testdir.parseconfigure('--rsyncdir=' + str(testdir.tmpdir))
+        nm = NodeManager(config, specs=[execnet.XSpec("popen")])
+        assert not nm._getrsyncdirs()
+        nm = NodeManager(config, specs=[execnet.XSpec("popen//chdir=qwe")])
+        assert nm.roots
+        assert testdir.tmpdir in nm.roots
+
+    def test_getrsyncignore(self, testdir):
+        config = testdir.parseconfigure('--rsyncignore=fo*')
+        nm = NodeManager(config, specs=[execnet.XSpec("popen//chdir=qwe")])
+        assert 'fo*' in nm.rsyncoptions['ignores']
+
+    def test_getrsyncdirs_with_conftest(self, testdir):
+        p = py.path.local()
+        for bn in 'x y z'.split():
+            p.mkdir(bn)
+        testdir.makeini("""
+            [pytest]
+            rsyncdirs= x
+        """)
+        config = testdir.parseconfigure(
+              testdir.tmpdir, '--rsyncdir=y', '--rsyncdir=z')
+        nm = NodeManager(config, specs=[execnet.XSpec("popen//chdir=xyz")])
+        roots = nm._getrsyncdirs()
+        #assert len(roots) == 3 + 1 # pylib
+        assert py.path.local('y') in roots
+        assert py.path.local('z') in roots
+        assert testdir.tmpdir.join('x') in roots
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/testing/test_remote.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/testing/test_remote.py b/tools/pytest-xdist/testing/test_remote.py
new file mode 100644
index 0000000..4a043e7
--- /dev/null
+++ b/tools/pytest-xdist/testing/test_remote.py
@@ -0,0 +1,248 @@
+import py
+from xdist.slavemanage import SlaveController, unserialize_report
+from xdist.remote import serialize_report
+import execnet
+queue = py.builtin._tryimport("queue", "Queue")
+import marshal
+
+WAIT_TIMEOUT = 10.0
+
def check_marshallable(d):
    """Raise ValueError unless *d* survives ``marshal.dumps``.

    The report-serialization tests use this to verify that serialized
    reports contain only plain, marshal-serializable builtin values.
    """
    try:
        marshal.dumps(d)
    except ValueError:
        # show the offending structure before failing so the bad field
        # is visible in the test output
        printer = py.std.pprint
        printer.pprint(d)
        raise ValueError("not marshallable")
+
class EventCall:
    """A named remote event plus its keyword payload.

    Wraps the ``(name, kwargs)`` pairs that come back from the slave,
    purely for convenient attribute access and readable printing.
    """

    def __init__(self, eventcall):
        name, kwargs = eventcall
        self.name = name
        self.kwargs = kwargs

    def __str__(self):
        return "<EventCall %s(**%s)>" % (self.name, self.kwargs)
+
class SlaveSetup:
    """Drive a single SlaveController against a fresh execnet gateway.

    Test helper behind the ``slave`` funcarg: ``setup()`` spawns a local
    gateway and a SlaveController on it; ``popevent``/``sendcommand``
    then talk to the remote slave.  When ``use_callback`` is true,
    events are delivered through a queue fed by the controller callback;
    otherwise they are pulled synchronously from the channel.
    """
    # subclasses/tests may flip this to exercise the callback path
    use_callback = False

    def __init__(self, request):
        self.testdir = request.getfuncargvalue("testdir")
        self.request = request
        self.events = queue.Queue()

    def setup(self):
        """Create gateway + controller; teardown is registered on *request*."""
        self.testdir.chdir()
        #import os ; os.environ['EXECNET_DEBUG'] = "2"
        self.gateway = execnet.makegateway()
        self.config = config = self.testdir.parseconfigure()
        # Pass a real callback only in callback mode.  The original used the
        # fragile `cond and value or default` idiom, which silently yields the
        # default whenever `value` is falsy; a conditional expression is exact.
        putevent = self.events.put if self.use_callback else None
        self.slp = SlaveController(None, self.gateway, config, putevent)
        self.request.addfinalizer(self.slp.ensure_teardown)
        self.slp.setup()

    def popevent(self, name=None):
        """Return the next event; if *name* is given, skip until it matches."""
        while 1:
            if self.use_callback:
                data = self.events.get(timeout=WAIT_TIMEOUT)
            else:
                data = self.slp.channel.receive(timeout=WAIT_TIMEOUT)
            ev = EventCall(data)
            if name is None or ev.name == name:
                return ev
            print("skipping %s" % (ev,))

    def sendcommand(self, name, **kwargs):
        """Forward a command to the remote slave via the controller."""
        self.slp.sendcommand(name, **kwargs)
+
def pytest_funcarg__slave(request):
    """Provide a fresh SlaveSetup helper bound to the requesting test."""
    helper = SlaveSetup(request)
    return helper
+
def test_remoteinitconfig(testdir):
    """remote_initconfig mirrors the local options but drops the terminal plugin."""
    from xdist.remote import remote_initconfig
    local_config = testdir.parseconfig()
    remote_config = remote_initconfig(local_config.option.__dict__,
                                      local_config.args)
    assert remote_config.option.__dict__ == local_config.option.__dict__
    assert remote_config.pluginmanager.getplugin("terminal") in (-1, None)
+
class TestReportSerialization:
    """Round-trip pytest reports through serialize/unserialize_report.

    Each test produces real reports with an inline pytest run, checks the
    serialized payload is marshal-transportable, rebuilds the report and
    compares it field by field against the original.
    """

    def _roundtrip(self, rep, kind):
        # serialize, verify channel transportability, then rebuild
        data = serialize_report(rep)
        check_marshallable(data)
        return unserialize_report(kind, data)

    def _check_outcome_fields(self, rep, newrep):
        # the three boolean outcome properties must survive the trip
        assert newrep.passed == rep.passed
        assert newrep.failed == rep.failed
        assert newrep.skipped == rep.skipped

    def test_itemreport_outcomes(self, testdir):
        """All runtest outcomes (pass/fail/skip/xfail) serialize faithfully."""
        reprec = testdir.inline_runsource("""
            import py
            def test_pass(): pass
            def test_fail(): 0/0
            @py.test.mark.skipif("True")
            def test_skip(): pass
            def test_skip_imperative():
                py.test.skip("hello")
            @py.test.mark.xfail("True")
            def test_xfail(): 0/0
            def test_xfail_imperative():
                py.test.xfail("hello")
        """)
        reports = reprec.getreports("pytest_runtest_logreport")
        # 17 = setup/call/teardown reports for the six tests above
        assert len(reports) == 17
        for rep in reports:
            newrep = self._roundtrip(rep, "testreport")
            self._check_outcome_fields(rep, newrep)
            if newrep.skipped and not hasattr(newrep, "wasxfail"):
                assert len(newrep.longrepr) == 3
            assert newrep.outcome == rep.outcome
            assert newrep.when == rep.when
            assert newrep.keywords == rep.keywords
            if rep.failed:
                assert newrep.longrepr == str(rep.longrepr)

    def test_collectreport_passed(self, testdir):
        """Successful collection reports round-trip cleanly."""
        reprec = testdir.inline_runsource("def test_func(): pass")
        for rep in reprec.getreports("pytest_collectreport"):
            newrep = self._roundtrip(rep, "collectreport")
            self._check_outcome_fields(rep, newrep)

    def test_collectreport_fail(self, testdir):
        """Failed collection keeps its (stringified) longrepr."""
        reprec = testdir.inline_runsource("qwe abc")
        reports = reprec.getreports("pytest_collectreport")
        assert reports
        for rep in reports:
            newrep = self._roundtrip(rep, "collectreport")
            self._check_outcome_fields(rep, newrep)
            if rep.failed:
                assert newrep.longrepr == str(rep.longrepr)

    def test_extended_report_deserialization(self, testdir):
        """Arbitrary extra attributes on a report survive the round trip."""
        reprec = testdir.inline_runsource("qwe abc")
        reports = reprec.getreports("pytest_collectreport")
        assert reports
        for rep in reports:
            rep.extra = True
            newrep = self._roundtrip(rep, "collectreport")
            assert newrep.extra
            self._check_outcome_fields(rep, newrep)
            if rep.failed:
                assert newrep.longrepr == str(rep.longrepr)
+
+
class TestSlaveInteractor:
    """Integration tests driving a live slave process via the ``slave``
    funcarg; each test scripts the exact event sequence expected back
    from the remote side, so event ordering matters throughout."""

    def test_basic_collect_and_runtests(self, slave):
        """Collect one test, run it, and check the full event protocol."""
        slave.testdir.makepyfile("""
            def test_func():
                pass
        """)
        slave.setup()
        ev = slave.popevent()
        assert ev.name == "slaveready"
        ev = slave.popevent()
        assert ev.name == "collectionstart"
        assert not ev.kwargs
        ev = slave.popevent("collectionfinish")
        assert ev.kwargs['topdir'] == slave.testdir.tmpdir
        ids = ev.kwargs['ids']
        assert len(ids) == 1
        # run everything that was collected, then ask the slave to stop
        slave.sendcommand("runtests", indices=list(range(len(ids))))
        slave.sendcommand("shutdown")
        ev = slave.popevent("logstart")
        assert ev.kwargs["nodeid"].endswith("test_func")
        assert len(ev.kwargs["location"]) == 3
        ev = slave.popevent("testreport") # setup
        ev = slave.popevent("testreport") # the actual "call" report
        assert ev.name == "testreport"
        rep = unserialize_report(ev.name, ev.kwargs['data'])
        assert rep.nodeid.endswith("::test_func")
        assert rep.passed
        assert rep.when == "call"
        ev = slave.popevent("slavefinished")
        assert 'slaveoutput' in ev.kwargs

    def test_remote_collect_skip(self, slave):
        """A module-level skip yields a skipped collectreport and no ids."""
        slave.testdir.makepyfile("""
            import py
            py.test.skip("hello")
        """)
        slave.setup()
        ev = slave.popevent("collectionstart")
        assert not ev.kwargs
        ev = slave.popevent()
        assert ev.name == "collectreport"
        ev = slave.popevent()
        assert ev.name == "collectreport"
        rep = unserialize_report(ev.name, ev.kwargs['data'])
        assert rep.skipped
        ev = slave.popevent("collectionfinish")
        assert not ev.kwargs['ids']

    def test_remote_collect_fail(self, slave):
        """A syntax error yields a failed collectreport and no ids."""
        slave.testdir.makepyfile("""aasd qwe""")
        slave.setup()
        ev = slave.popevent("collectionstart")
        assert not ev.kwargs
        ev = slave.popevent()
        assert ev.name == "collectreport"
        ev = slave.popevent()
        assert ev.name == "collectreport"
        rep = unserialize_report(ev.name, ev.kwargs['data'])
        assert rep.failed
        ev = slave.popevent("collectionfinish")
        assert not ev.kwargs['ids']

    def test_runtests_all(self, slave):
        """The runtests_all command runs every collected test."""
        slave.testdir.makepyfile("""
            def test_func(): pass
            def test_func2(): pass
        """)
        slave.setup()
        ev = slave.popevent()
        assert ev.name == "slaveready"
        ev = slave.popevent()
        assert ev.name == "collectionstart"
        assert not ev.kwargs
        ev = slave.popevent("collectionfinish")
        ids = ev.kwargs['ids']
        assert len(ids) == 2
        slave.sendcommand("runtests_all", )
        slave.sendcommand("shutdown", )
        for func in "::test_func", "::test_func2":
            for i in range(3):  # setup/call/teardown
                ev = slave.popevent("testreport")
                assert ev.name == "testreport"
                rep = unserialize_report(ev.name, ev.kwargs['data'])
                assert rep.nodeid.endswith(func)
        ev = slave.popevent("slavefinished")
        assert 'slaveoutput' in ev.kwargs

    def test_happy_run_events_converted(self, testdir, slave):
        # NOTE(review): the imperative xfail() below raises immediately, so
        # everything after it is currently dead code kept as a sketch of the
        # intended event-conversion test.
        py.test.xfail("implement a simple test for event production")
        assert not slave.use_callback
        slave.testdir.makepyfile("""
            def test_func():
                pass
        """)
        slave.setup()
        hookrec = testdir.getreportrecorder(slave.config)
        for data in slave.slp.channel:
            slave.slp.process_from_remote(data)
        slave.slp.process_from_remote(slave.slp.ENDMARK)
        py.std.pprint.pprint(hookrec.hookrecorder.calls)
        hookrec.hookrecorder.contains([
            ("pytest_collectstart", "collector.fspath == aaa"),
            ("pytest_pycollect_makeitem", "name == 'test_func'"),
            ("pytest_collectreport", "report.collector.fspath == aaa"),
            ("pytest_collectstart", "collector.fspath == bbb"),
            ("pytest_pycollect_makeitem", "name == 'test_func'"),
            ("pytest_collectreport", "report.collector.fspath == bbb"),
        ])
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/testing/test_slavemanage.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/testing/test_slavemanage.py b/tools/pytest-xdist/testing/test_slavemanage.py
new file mode 100644
index 0000000..a94181b
--- /dev/null
+++ b/tools/pytest-xdist/testing/test_slavemanage.py
@@ -0,0 +1,241 @@
+import py
+import execnet
+from xdist.slavemanage import HostRSync, NodeManager
+
+pytest_plugins = "pytester",
+
def pytest_funcarg__hookrecorder(request):
    """Record hook calls made through the parsed config's hook relay."""
    _pytest = request.getfuncargvalue('_pytest')
    config = request.getfuncargvalue('config')
    recorder = _pytest.gethookrecorder(config.hook)
    return recorder
+
def pytest_funcarg__config(request):
    """A pytest config parsed inside the funcarg testdir's environment."""
    return request.getfuncargvalue("testdir").parseconfig()
+
def pytest_funcarg__mysetup(request):
    """Fresh ``source``/``dest`` directory pair created under tmpdir."""
    class mysetup:
        def __init__(self, request):
            base = request.getfuncargvalue("tmpdir")
            self.source = base.mkdir("source")
            self.dest = base.mkdir("dest")
            # fetched only for its side effects -- TODO(review): confirm
            # which _pytest initialization the tests rely on here
            request.getfuncargvalue("_pytest")
    return mysetup(request)
+
class TestNodeManagerPopen:
    """NodeManager behavior with local popen gateways: spec defaults,
    gateway setup/teardown hooks and rsync to per-gateway chdirs."""

    def test_popen_no_default_chdir(self, config):
        # popen specs share the local filesystem, so no chdir is forced
        gm = NodeManager(config, ["popen"])
        assert gm.specs[0].chdir is None

    def test_default_chdir(self, config):
        """Remote specs get 'pyexecnetcache' unless a default is passed in."""
        l = ["ssh=noco", "socket=xyz"]
        for spec in NodeManager(config, l).specs:
            assert spec.chdir == "pyexecnetcache"
        for spec in NodeManager(config, l, defaultchdir="abc").specs:
            assert spec.chdir == "abc"

    def test_popen_makegateway_events(self, config, hookrecorder, _pytest):
        """makegateways fires setupnodes once and newgateway per gateway."""
        hm = NodeManager(config, ["popen"] * 2)
        hm.makegateways()
        call = hookrecorder.popcall("pytest_xdist_setupnodes")
        assert len(call.specs) == 2

        call = hookrecorder.popcall("pytest_xdist_newgateway")
        assert call.gateway.spec == execnet.XSpec("popen")
        assert call.gateway.id == "gw0"
        call = hookrecorder.popcall("pytest_xdist_newgateway")
        assert call.gateway.id == "gw1"
        assert len(hm.group) == 2
        hm.teardown_nodes()
        assert not len(hm.group)

    def test_popens_rsync(self, config, mysetup):
        """rsync to chdir-less popen gateways skips the actual transfer."""
        source = mysetup.source
        hm = NodeManager(config, ["popen"] * 2)
        hm.makegateways()
        assert len(hm.group) == 2
        for gw in hm.group:
            # stub out remote_exec; `args` is a *class* attribute on purpose
            # so all recorded call arguments accumulate in one shared list
            class pseudoexec:
                args = []
                def __init__(self, *args):
                    self.args.extend(args)
                def waitclose(self):
                    pass
            gw.remote_exec = pseudoexec
        l = []
        hm.rsync(source, notify=lambda *args: l.append(args))
        # no rsync notifications: nothing was transferred
        assert not l
        hm.teardown_nodes()
        assert not len(hm.group)
        assert "sys.path.insert" in gw.remote_exec.args[0]

    def test_rsync_popen_with_path(self, config, mysetup):
        """With a chdir set, rsync really copies the tree to the gateway."""
        source, dest = mysetup.source, mysetup.dest
        hm = NodeManager(config, ["popen//chdir=%s" %dest] * 1)
        hm.makegateways()
        source.ensure("dir1", "dir2", "hello")
        l = []
        hm.rsync(source, notify=lambda *args: l.append(args))
        assert len(l) == 1
        assert l[0] == ("rsyncrootready", hm.group['gw0'].spec, source)
        hm.teardown_nodes()
        # the source tree lands under <chdir>/<source basename>
        dest = dest.join(source.basename)
        assert dest.join("dir1").check()
        assert dest.join("dir1", "dir2").check()
        assert dest.join("dir1", "dir2", 'hello').check()

    def test_rsync_same_popen_twice(self, config, mysetup, hookrecorder):
        """Two gateways with the same chdir trigger only one rsync."""
        source, dest = mysetup.source, mysetup.dest
        hm = NodeManager(config, ["popen//chdir=%s" %dest] * 2)
        hm.makegateways()
        source.ensure("dir1", "dir2", "hello")
        hm.rsync(source)
        call = hookrecorder.popcall("pytest_xdist_rsyncstart")
        assert call.source == source
        assert len(call.gateways) == 1
        assert call.gateways[0] in hm.group
        call = hookrecorder.popcall("pytest_xdist_rsyncfinish")
+
class TestHRSync:
    """HostRSync filtering and actual transfer to a popen gateway."""

    def pytest_funcarg__mysetup(self, request):
        """Class-local source/dest pair (shadows the module-level mysetup)."""
        class mysetup:
            def __init__(self, request):
                base = request.getfuncargvalue('tmpdir')
                self.source = base.mkdir("source")
                self.dest = base.mkdir("dest")
        return mysetup(request)

    def test_hrsync_filter(self, mysetup):
        """The default ignores drop .svn, dotfiles and trailing-~ backups."""
        source = mysetup.source
        source.ensure("dir", "file.txt")
        source.ensure(".svn", "entries")
        source.ensure(".somedotfile", "moreentries")
        source.ensure("somedir", "editfile~")
        syncer = HostRSync(source, ignores=NodeManager.DEFAULT_IGNORES)
        kept = list(source.visit(rec=syncer.filter, fil=syncer.filter))
        assert len(kept) == 3
        names = [entry.basename for entry in kept]
        assert 'dir' in names
        assert 'file.txt' in names
        assert 'somedir' in names

    def test_hrsync_one_host(self, mysetup):
        """A file written before send() shows up under the remote chdir."""
        source, dest = mysetup.source, mysetup.dest
        gw = execnet.makegateway("popen//chdir=%s" % dest)
        finished = []
        rsync = HostRSync(source)
        rsync.add_target_host(gw, finished=lambda: finished.append(1))
        source.join("hello.py").write("world")
        rsync.send()
        gw.exit()
        assert dest.join(source.basename, "hello.py").check()
        assert len(finished) == 1
+
+
class TestNodeManager:
    """End-to-end NodeManager tests: rsync root selection from ini/options,
    ignore patterns, popen optimisation and a real ssh run."""

    @py.test.mark.xfail
    def test_rsync_roots_no_roots(self, testdir, mysetup):
        mysetup.source.ensure("dir1", "file1").write("hello")
        config = testdir.parseconfig(mysetup.source)
        nodemanager = NodeManager(config, ["popen//chdir=%s" % mysetup.dest])
        #assert nodemanager.config.topdir == source == config.topdir
        nodemanager.makegateways()
        nodemanager.rsync_roots()
        p, = nodemanager.gwmanager.multi_exec(
            "import os ; channel.send(os.getcwd())").receive_each()
        p = py.path.local(p)
        py.builtin.print_("remote curdir", p)
        assert p == mysetup.dest.join(config.topdir.basename)
        assert p.join("dir1").check()
        assert p.join("dir1", "file1").check()

    def test_popen_rsync_subdir(self, testdir, mysetup):
        """Both a subdir and the whole source work as --rsyncdir roots."""
        source, dest = mysetup.source, mysetup.dest
        dir1 = mysetup.source.mkdir("dir1")
        dir2 = dir1.mkdir("dir2")
        dir2.ensure("hello")
        for rsyncroot in (dir1, source):
            dest.remove()
            nodemanager = NodeManager(testdir.parseconfig(
                "--tx", "popen//chdir=%s" % dest,
                "--rsyncdir", rsyncroot,
                source,
            ))
            nodemanager.makegateways()
            nodemanager.rsync_roots()
            if rsyncroot == source:
                # whole-source roots land under <dest>/source; note that
                # this rebinds `dest` for the remaining assertions (it only
                # happens on the loop's final iteration)
                dest = dest.join("source")
            assert dest.join("dir1").check()
            assert dest.join("dir1", "dir2").check()
            assert dest.join("dir1", "dir2", 'hello').check()
            nodemanager.teardown_nodes()

    def test_init_rsync_roots(self, testdir, mysetup):
        """Only the ini-configured rsyncdirs are transferred."""
        source, dest = mysetup.source, mysetup.dest
        dir2 = source.ensure("dir1", "dir2", dir=1)
        source.ensure("dir1", "somefile", dir=1)
        dir2.ensure("hello")
        source.ensure("bogusdir", "file")
        source.join("tox.ini").write(py.std.textwrap.dedent("""
            [pytest]
            rsyncdirs=dir1/dir2
        """))
        config = testdir.parseconfig(source)
        nodemanager = NodeManager(config, ["popen//chdir=%s" % dest])
        nodemanager.makegateways()
        nodemanager.rsync_roots()
        assert dest.join("dir2").check()
        assert not dest.join("dir1").check()
        assert not dest.join("bogus").check()

    def test_rsyncignore(self, testdir, mysetup):
        """rsyncignore patterns from ini and command line are both honored."""
        source, dest = mysetup.source, mysetup.dest
        dir2 = source.ensure("dir1", "dir2", dir=1)
        source.ensure("dir5", "dir6", "bogus")
        source.ensure("dir5", "file")
        dir2.ensure("hello")
        source.ensure("foo", "bar")
        source.ensure("bar", "foo")
        source.join("tox.ini").write(py.std.textwrap.dedent("""
            [pytest]
            rsyncdirs = dir1 dir5
            rsyncignore = dir1/dir2 dir5/dir6 foo*
        """))
        config = testdir.parseconfig(source)
        config.option.rsyncignore = ['bar']
        nodemanager = NodeManager(config, ["popen//chdir=%s" % dest])
        nodemanager.makegateways()
        nodemanager.rsync_roots()
        assert dest.join("dir1").check()
        assert not dest.join("dir1", "dir2").check()
        assert dest.join("dir5","file").check()
        assert not dest.join("dir6").check()
        assert not dest.join('foo').check()
        assert not dest.join('bar').check()

    def test_optimise_popen(self, testdir, mysetup):
        """Pure-popen runs skip rsync entirely (same filesystem, no chdir)."""
        source = mysetup.source
        specs = ["popen"] * 3
        source.join("conftest.py").write("rsyncdirs = ['a']")
        source.ensure('a', dir=1)
        config = testdir.parseconfig(source)
        nodemanager = NodeManager(config, specs)
        nodemanager.makegateways()
        nodemanager.rsync_roots()
        for gwspec in nodemanager.specs:
            assert gwspec._samefilesystem()
            assert not gwspec.chdir

    def test_ssh_setup_nodes(self, specssh, testdir):
        """A distributed run over a real ssh spec passes a trivial test."""
        testdir.makepyfile(__init__="", test_x="""
            def test_one():
                pass
        """)
        reprec = testdir.inline_run("-d", "--rsyncdir=%s" % testdir.tmpdir,
                "--tx", specssh, testdir.tmpdir)
        rep, = reprec.getreports("pytest_runtest_logreport")
        assert rep.passed
+
+

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/tox.ini
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/tox.ini b/tools/pytest-xdist/tox.ini
new file mode 100644
index 0000000..2ab3247
--- /dev/null
+++ b/tools/pytest-xdist/tox.ini
@@ -0,0 +1,32 @@
+[tox]
+envlist=py26,py32,py33,py34,py27,py27-pexpect,py33-pexpect,py26-old,py33-old,flakes
+
+[testenv]
+changedir=testing
+deps=pytest>=2.5.1
+commands= py.test --junitxml={envlogdir}/junit-{envname}.xml []
+
+[testenv:py27-pexpect]
+deps={[testenv]deps}
+    pexpect
+[testenv:py33-pexpect]
+deps={[testenv]deps}
+    pexpect
+
+[testenv:flakes]
+changedir=
+deps = pytest-flakes>=0.2
+commands = py.test --flakes -m flakes testing xdist
+
+[testenv:py26-old]
+deps=
+    pytest==2.4.2
+
+[testenv:py33-old]
+basepython = python3.3
+deps=
+    pytest==2.4.2
+
+[pytest]
+addopts = -rsfxX
+;; hello

http://git-wip-us.apache.org/repos/asf/cloudstack/blob/145542d6/tools/pytest-xdist/xdist/__init__.py
----------------------------------------------------------------------
diff --git a/tools/pytest-xdist/xdist/__init__.py b/tools/pytest-xdist/xdist/__init__.py
new file mode 100644
index 0000000..0606670
--- /dev/null
+++ b/tools/pytest-xdist/xdist/__init__.py
@@ -0,0 +1,2 @@
# Version string of the vendored pytest-xdist distribution.
__version__ = '1.11'