Posted to commits@senssoft.apache.org by ar...@apache.org on 2016/12/16 17:10:37 UTC

[19/58] [abbrv] [partial] incubator-senssoft-tap git commit: Fixed .gitignore file

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py b/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py
deleted file mode 100644
index a6f44a5..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py
+++ /dev/null
@@ -1,347 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-lockfile.py - Platform-independent advisory file locks.
-
-Requires Python 2.5 unless you apply 2.4.diff
-Locking is done on a per-thread basis instead of a per-process basis.
-
-Usage:
-
->>> lock = LockFile('somefile')
->>> try:
-...     lock.acquire()
-... except AlreadyLocked:
-...     print 'somefile', 'is locked already.'
-... except LockFailed:
-...     print 'somefile', 'can\\'t be locked.'
-... else:
-...     print 'got lock'
-got lock
->>> print lock.is_locked()
-True
->>> lock.release()
-
->>> lock = LockFile('somefile')
->>> print lock.is_locked()
-False
->>> with lock:
-...    print lock.is_locked()
-True
->>> print lock.is_locked()
-False
-
->>> lock = LockFile('somefile')
->>> # It is okay to lock twice from the same thread...
->>> with lock:
-...     lock.acquire()
-...
->>> # Though no counter is kept, so you can't unlock multiple times...
->>> print lock.is_locked()
-False
-
-Exceptions:
-
-    Error - base class for other exceptions
-        LockError - base class for all locking exceptions
-            AlreadyLocked - Another thread or process already holds the lock
-            LockFailed - Lock failed for some other reason
-        UnlockError - base class for all unlocking exceptions
-            AlreadyUnlocked - File was not locked.
-            NotMyLock - File was locked but not by the current thread/process
-"""
-
-from __future__ import absolute_import
-
-import functools
-import os
-import socket
-import threading
-import warnings
-
-# Work with PEP8 and non-PEP8 versions of threading module.
-if not hasattr(threading, "current_thread"):
-    threading.current_thread = threading.currentThread
-if not hasattr(threading.Thread, "get_name"):
-    threading.Thread.get_name = threading.Thread.getName
-
-__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked',
-           'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock',
-           'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock',
-           'LockBase', 'locked']
-
-
-class Error(Exception):
-    """
-    Base class for other exceptions.
-
-    >>> try:
-    ...   raise Error
-    ... except Exception:
-    ...   pass
-    """
-    pass
-
-
-class LockError(Error):
-    """
-    Base class for error arising from attempts to acquire the lock.
-
-    >>> try:
-    ...   raise LockError
-    ... except Error:
-    ...   pass
-    """
-    pass
-
-
-class LockTimeout(LockError):
-    """Raised when lock creation fails within a user-defined period of time.
-
-    >>> try:
-    ...   raise LockTimeout
-    ... except LockError:
-    ...   pass
-    """
-    pass
-
-
-class AlreadyLocked(LockError):
-    """Some other thread/process is locking the file.
-
-    >>> try:
-    ...   raise AlreadyLocked
-    ... except LockError:
-    ...   pass
-    """
-    pass
-
-
-class LockFailed(LockError):
-    """Lock file creation failed for some other reason.
-
-    >>> try:
-    ...   raise LockFailed
-    ... except LockError:
-    ...   pass
-    """
-    pass
-
-
-class UnlockError(Error):
-    """
-    Base class for errors arising from attempts to release the lock.
-
-    >>> try:
-    ...   raise UnlockError
-    ... except Error:
-    ...   pass
-    """
-    pass
-
-
-class NotLocked(UnlockError):
-    """Raised when an attempt is made to unlock an unlocked file.
-
-    >>> try:
-    ...   raise NotLocked
-    ... except UnlockError:
-    ...   pass
-    """
-    pass
-
-
-class NotMyLock(UnlockError):
-    """Raised when an attempt is made to unlock a file someone else locked.
-
-    >>> try:
-    ...   raise NotMyLock
-    ... except UnlockError:
-    ...   pass
-    """
-    pass
-
-
-class _SharedBase(object):
-    def __init__(self, path):
-        self.path = path
-
-    def acquire(self, timeout=None):
-        """
-        Acquire the lock.
-
-        * If timeout is omitted (or None), wait forever trying to lock the
-          file.
-
-        * If timeout > 0, try to acquire the lock for that many seconds.  If
-          the lock period expires and the file is still locked, raise
-          LockTimeout.
-
-        * If timeout <= 0, raise AlreadyLocked immediately if the file is
-          already locked.
-        """
-        raise NotImplemented("implement in subclass")
-
-    def release(self):
-        """
-        Release the lock.
-
-        If the file is not locked, raise NotLocked.
-        """
-        raise NotImplemented("implement in subclass")
-
-    def __enter__(self):
-        """
-        Context manager support.
-        """
-        self.acquire()
-        return self
-
-    def __exit__(self, *_exc):
-        """
-        Context manager support.
-        """
-        self.release()
-
-    def __repr__(self):
-        return "<%s: %r>" % (self.__class__.__name__, self.path)
-
-
-class LockBase(_SharedBase):
-    """Base class for platform-specific lock classes."""
-    def __init__(self, path, threaded=True, timeout=None):
-        """
-        >>> lock = LockBase('somefile')
-        >>> lock = LockBase('somefile', threaded=False)
-        """
-        super(LockBase, self).__init__(path)
-        self.lock_file = os.path.abspath(path) + ".lock"
-        self.hostname = socket.gethostname()
-        self.pid = os.getpid()
-        if threaded:
-            t = threading.current_thread()
-            # Thread objects in Python 2.4 and earlier do not have ident
-            # attrs.  Worm around that.
-            ident = getattr(t, "ident", hash(t))
-            self.tname = "-%x" % (ident & 0xffffffff)
-        else:
-            self.tname = ""
-        dirname = os.path.dirname(self.lock_file)
-
-        # unique name is mostly about the current process, but must
-        # also contain the path -- otherwise, two adjacent locked
-        # files conflict (one file gets locked, creating lock-file and
-        # unique file, the other one gets locked, creating lock-file
-        # and overwriting the already existing lock-file, then one
-        # gets unlocked, deleting both lock-file and unique file,
-        # finally the last lock errors out upon releasing.
-        self.unique_name = os.path.join(dirname,
-                                        "%s%s.%s%s" % (self.hostname,
-                                                       self.tname,
-                                                       self.pid,
-                                                       hash(self.path)))
-        self.timeout = timeout
-
-    def is_locked(self):
-        """
-        Tell whether or not the file is locked.
-        """
-        raise NotImplemented("implement in subclass")
-
-    def i_am_locking(self):
-        """
-        Return True if this object is locking the file.
-        """
-        raise NotImplemented("implement in subclass")
-
-    def break_lock(self):
-        """
-        Remove a lock.  Useful if a locking thread failed to unlock.
-        """
-        raise NotImplemented("implement in subclass")
-
-    def __repr__(self):
-        return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name,
-                                   self.path)
-
-
-def _fl_helper(cls, mod, *args, **kwds):
-    warnings.warn("Import from %s module instead of lockfile package" % mod,
-                  DeprecationWarning, stacklevel=2)
-    # This is a bit funky, but it's only for awhile.  The way the unit tests
-    # are constructed this function winds up as an unbound method, so it
-    # actually takes three args, not two.  We want to toss out self.
-    if not isinstance(args[0], str):
-        # We are testing, avoid the first arg
-        args = args[1:]
-    if len(args) == 1 and not kwds:
-        kwds["threaded"] = True
-    return cls(*args, **kwds)
-
-
-def LinkFileLock(*args, **kwds):
-    """Factory function provided for backwards compatibility.
-
-    Do not use in new code.  Instead, import LinkLockFile from the
-    lockfile.linklockfile module.
-    """
-    from . import linklockfile
-    return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile",
-                      *args, **kwds)
-
-
-def MkdirFileLock(*args, **kwds):
-    """Factory function provided for backwards compatibility.
-
-    Do not use in new code.  Instead, import MkdirLockFile from the
-    lockfile.mkdirlockfile module.
-    """
-    from . import mkdirlockfile
-    return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile",
-                      *args, **kwds)
-
-
-def SQLiteFileLock(*args, **kwds):
-    """Factory function provided for backwards compatibility.
-
-    Do not use in new code.  Instead, import SQLiteLockFile from the
-    lockfile.mkdirlockfile module.
-    """
-    from . import sqlitelockfile
-    return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile",
-                      *args, **kwds)
-
-
-def locked(path, timeout=None):
-    """Decorator which enables locks for decorated function.
-
-    Arguments:
-     - path: path for lockfile.
-     - timeout (optional): Timeout for acquiring lock.
-
-     Usage:
-         @locked('/var/run/myname', timeout=0)
-         def myname(...):
-             ...
-    """
-    def decor(func):
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs):
-            lock = FileLock(path, timeout=timeout)
-            lock.acquire()
-            try:
-                return func(*args, **kwargs)
-            finally:
-                lock.release()
-        return wrapper
-    return decor
-
-
-if hasattr(os, "link"):
-    from . import linklockfile as _llf
-    LockFile = _llf.LinkLockFile
-else:
-    from . import mkdirlockfile as _mlf
-    LockFile = _mlf.MkdirLockFile
-
-FileLock = LockFile
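
For reference, the file removed above is pip's vendored copy of the lockfile package. A minimal usage sketch, assuming the standalone lockfile distribution is installed (the vendored copy is only importable as pip._vendor.lockfile) and following the timeout semantics documented in acquire() above; paths and timeouts here are illustrative only:

    # Minimal sketch of the API documented in the module above.
    from lockfile import LockFile, LockTimeout

    lock = LockFile("somefile")            # the on-disk lock is "somefile.lock"
    try:
        lock.acquire(timeout=5)            # per the docstring: > 0 means wait that long
    except LockTimeout:
        print("still locked after 5 seconds")
    else:
        try:
            pass                           # work with the protected resource here
        finally:
            lock.release()

    # acquire(timeout=0) raises AlreadyLocked immediately instead of waiting,
    # and the same object also supports the context-manager protocol:
    with LockFile("somefile"):
        pass                               # lock held for the duration of the block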

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py b/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py
deleted file mode 100644
index 2ca9be0..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from __future__ import absolute_import
-
-import time
-import os
-
-from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
-               AlreadyLocked)
-
-
-class LinkLockFile(LockBase):
-    """Lock access to a file using atomic property of link(2).
-
-    >>> lock = LinkLockFile('somefile')
-    >>> lock = LinkLockFile('somefile', threaded=False)
-    """
-
-    def acquire(self, timeout=None):
-        try:
-            open(self.unique_name, "wb").close()
-        except IOError:
-            raise LockFailed("failed to create %s" % self.unique_name)
-
-        timeout = timeout if timeout is not None else self.timeout
-        end_time = time.time()
-        if timeout is not None and timeout > 0:
-            end_time += timeout
-
-        while True:
-            # Try and create a hard link to it.
-            try:
-                os.link(self.unique_name, self.lock_file)
-            except OSError:
-                # Link creation failed.  Maybe we've double-locked?
-                nlinks = os.stat(self.unique_name).st_nlink
-                if nlinks == 2:
-                    # The original link plus the one I created == 2.  We're
-                    # good to go.
-                    return
-                else:
-                    # Otherwise the lock creation failed.
-                    if timeout is not None and time.time() > end_time:
-                        os.unlink(self.unique_name)
-                        if timeout > 0:
-                            raise LockTimeout("Timeout waiting to acquire"
-                                              " lock for %s" %
-                                              self.path)
-                        else:
-                            raise AlreadyLocked("%s is already locked" %
-                                                self.path)
-                    time.sleep(timeout is not None and timeout / 10 or 0.1)
-            else:
-                # Link creation succeeded.  We're good to go.
-                return
-
-    def release(self):
-        if not self.is_locked():
-            raise NotLocked("%s is not locked" % self.path)
-        elif not os.path.exists(self.unique_name):
-            raise NotMyLock("%s is locked, but not by me" % self.path)
-        os.unlink(self.unique_name)
-        os.unlink(self.lock_file)
-
-    def is_locked(self):
-        return os.path.exists(self.lock_file)
-
-    def i_am_locking(self):
-        return (self.is_locked() and
-                os.path.exists(self.unique_name) and
-                os.stat(self.unique_name).st_nlink == 2)
-
-    def break_lock(self):
-        if os.path.exists(self.lock_file):
-            os.unlink(self.lock_file)
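
The class above relies on hard-link creation being atomic: whichever process manages to link its unique file to the shared lock-file name wins. A stripped-down sketch of that primitive, with illustrative file names:

    # Core primitive used by LinkLockFile: os.link() either creates the lock
    # file atomically or fails with OSError if another process got there first.
    import os

    unique_name = "somefile.lock.host-1234"   # per-process name (illustrative)
    lock_file = "somefile.lock"

    open(unique_name, "wb").close()
    try:
        os.link(unique_name, lock_file)        # atomic on POSIX filesystems
        print("lock acquired")
    except OSError:
        print("lock already held by another process")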

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py b/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py
deleted file mode 100644
index 05a8c96..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py
+++ /dev/null
@@ -1,84 +0,0 @@
-from __future__ import absolute_import, division
-
-import time
-import os
-import sys
-import errno
-
-from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
-               AlreadyLocked)
-
-
-class MkdirLockFile(LockBase):
-    """Lock file by creating a directory."""
-    def __init__(self, path, threaded=True, timeout=None):
-        """
-        >>> lock = MkdirLockFile('somefile')
-        >>> lock = MkdirLockFile('somefile', threaded=False)
-        """
-        LockBase.__init__(self, path, threaded, timeout)
-        # Lock file itself is a directory.  Place the unique file name into
-        # it.
-        self.unique_name = os.path.join(self.lock_file,
-                                        "%s.%s%s" % (self.hostname,
-                                                     self.tname,
-                                                     self.pid))
-
-    def acquire(self, timeout=None):
-        timeout = timeout if timeout is not None else self.timeout
-        end_time = time.time()
-        if timeout is not None and timeout > 0:
-            end_time += timeout
-
-        if timeout is None:
-            wait = 0.1
-        else:
-            wait = max(0, timeout / 10)
-
-        while True:
-            try:
-                os.mkdir(self.lock_file)
-            except OSError:
-                err = sys.exc_info()[1]
-                if err.errno == errno.EEXIST:
-                    # Already locked.
-                    if os.path.exists(self.unique_name):
-                        # Already locked by me.
-                        return
-                    if timeout is not None and time.time() > end_time:
-                        if timeout > 0:
-                            raise LockTimeout("Timeout waiting to acquire"
-                                              " lock for %s" %
-                                              self.path)
-                        else:
-                            # Someone else has the lock.
-                            raise AlreadyLocked("%s is already locked" %
-                                                self.path)
-                    time.sleep(wait)
-                else:
-                    # Couldn't create the lock for some other reason
-                    raise LockFailed("failed to create %s" % self.lock_file)
-            else:
-                open(self.unique_name, "wb").close()
-                return
-
-    def release(self):
-        if not self.is_locked():
-            raise NotLocked("%s is not locked" % self.path)
-        elif not os.path.exists(self.unique_name):
-            raise NotMyLock("%s is locked, but not by me" % self.path)
-        os.unlink(self.unique_name)
-        os.rmdir(self.lock_file)
-
-    def is_locked(self):
-        return os.path.exists(self.lock_file)
-
-    def i_am_locking(self):
-        return (self.is_locked() and
-                os.path.exists(self.unique_name))
-
-    def break_lock(self):
-        if os.path.exists(self.lock_file):
-            for name in os.listdir(self.lock_file):
-                os.unlink(os.path.join(self.lock_file, name))
-            os.rmdir(self.lock_file)
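
MkdirLockFile above uses the same idea with a directory: mkdir(2) is atomic and fails with EEXIST when the directory already exists. A stripped-down sketch of that check, with an illustrative path:

    # Core primitive used by MkdirLockFile: os.mkdir() succeeds for exactly
    # one contender; EEXIST means somebody else already holds the lock.
    import errno
    import os

    lock_dir = "somefile.lock"                 # illustrative path
    try:
        os.mkdir(lock_dir)
        print("lock acquired")
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            print("already locked")
        else:
            raise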

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py b/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py
deleted file mode 100644
index 069e85b..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# pidlockfile.py
-#
-# Copyright © 2008–2009 Ben Finney <be...@benfinney.id.au>
-#
-# This is free software: you may copy, modify, and/or distribute this work
-# under the terms of the Python Software Foundation License, version 2 or
-# later as published by the Python Software Foundation.
-# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
-
-""" Lockfile behaviour implemented via Unix PID files.
-    """
-
-from __future__ import absolute_import
-
-import errno
-import os
-import time
-
-from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock,
-               LockTimeout)
-
-
-class PIDLockFile(LockBase):
-    """ Lockfile implemented as a Unix PID file.
-
-    The lock file is a normal file named by the attribute `path`.
-    A lock's PID file contains a single line of text, containing
-    the process ID (PID) of the process that acquired the lock.
-
-    >>> lock = PIDLockFile('somefile')
-    >>> lock = PIDLockFile('somefile')
-    """
-
-    def __init__(self, path, threaded=False, timeout=None):
-        # pid lockfiles don't support threaded operation, so always force
-        # False as the threaded arg.
-        LockBase.__init__(self, path, False, timeout)
-        self.unique_name = self.path
-
-    def read_pid(self):
-        """ Get the PID from the lock file.
-            """
-        return read_pid_from_pidfile(self.path)
-
-    def is_locked(self):
-        """ Test if the lock is currently held.
-
-            The lock is held if the PID file for this lock exists.
-
-            """
-        return os.path.exists(self.path)
-
-    def i_am_locking(self):
-        """ Test if the lock is held by the current process.
-
-        Returns ``True`` if the current process ID matches the
-        number stored in the PID file.
-        """
-        return self.is_locked() and os.getpid() == self.read_pid()
-
-    def acquire(self, timeout=None):
-        """ Acquire the lock.
-
-        Creates the PID file for this lock, or raises an error if
-        the lock could not be acquired.
-        """
-
-        timeout = timeout if timeout is not None else self.timeout
-        end_time = time.time()
-        if timeout is not None and timeout > 0:
-            end_time += timeout
-
-        while True:
-            try:
-                write_pid_to_pidfile(self.path)
-            except OSError as exc:
-                if exc.errno == errno.EEXIST:
-                    # The lock creation failed.  Maybe sleep a bit.
-                    if time.time() > end_time:
-                        if timeout is not None and timeout > 0:
-                            raise LockTimeout("Timeout waiting to acquire"
-                                              " lock for %s" %
-                                              self.path)
-                        else:
-                            raise AlreadyLocked("%s is already locked" %
-                                                self.path)
-                    time.sleep(timeout is not None and timeout / 10 or 0.1)
-                else:
-                    raise LockFailed("failed to create %s" % self.path)
-            else:
-                return
-
-    def release(self):
-        """ Release the lock.
-
-            Removes the PID file to release the lock, or raises an
-            error if the current process does not hold the lock.
-
-            """
-        if not self.is_locked():
-            raise NotLocked("%s is not locked" % self.path)
-        if not self.i_am_locking():
-            raise NotMyLock("%s is locked, but not by me" % self.path)
-        remove_existing_pidfile(self.path)
-
-    def break_lock(self):
-        """ Break an existing lock.
-
-            Removes the PID file if it already exists, otherwise does
-            nothing.
-
-            """
-        remove_existing_pidfile(self.path)
-
-
-def read_pid_from_pidfile(pidfile_path):
-    """ Read the PID recorded in the named PID file.
-
-        Read and return the numeric PID recorded as text in the named
-        PID file. If the PID file cannot be read, or if the content is
-        not a valid PID, return ``None``.
-
-        """
-    pid = None
-    try:
-        pidfile = open(pidfile_path, 'r')
-    except IOError:
-        pass
-    else:
-        # According to the FHS 2.3 section on PID files in /var/run:
-        #
-        #   The file must consist of the process identifier in
-        #   ASCII-encoded decimal, followed by a newline character.
-        #
-        #   Programs that read PID files should be somewhat flexible
-        #   in what they accept; i.e., they should ignore extra
-        #   whitespace, leading zeroes, absence of the trailing
-        #   newline, or additional lines in the PID file.
-
-        line = pidfile.readline().strip()
-        try:
-            pid = int(line)
-        except ValueError:
-            pass
-        pidfile.close()
-
-    return pid
-
-
-def write_pid_to_pidfile(pidfile_path):
-    """ Write the PID in the named PID file.
-
-        Get the numeric process ID (“PID”) of the current process
-        and write it to the named file as a line of text.
-
-        """
-    open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
-    open_mode = 0o644
-    pidfile_fd = os.open(pidfile_path, open_flags, open_mode)
-    pidfile = os.fdopen(pidfile_fd, 'w')
-
-    # According to the FHS 2.3 section on PID files in /var/run:
-    #
-    #   The file must consist of the process identifier in
-    #   ASCII-encoded decimal, followed by a newline character. For
-    #   example, if crond was process number 25, /var/run/crond.pid
-    #   would contain three characters: two, five, and newline.
-
-    pid = os.getpid()
-    pidfile.write("%s\n" % pid)
-    pidfile.close()
-
-
-def remove_existing_pidfile(pidfile_path):
-    """ Remove the named PID file if it exists.
-
-        Removing a PID file that doesn't already exist puts us in the
-        desired state, so we ignore the condition if the file does not
-        exist.
-
-        """
-    try:
-        os.remove(pidfile_path)
-    except OSError as exc:
-        if exc.errno == errno.ENOENT:
-            pass
-        else:
-            raise
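
The FHS convention quoted in the comments above boils down to a one-line file created with O_CREAT | O_EXCL, so that only one process can create it. A stripped-down sketch mirroring write_pid_to_pidfile() above, with an illustrative path:

    # Write the current PID to a pidfile atomically; O_EXCL makes creation
    # fail if the file already exists, which is what signals "already locked".
    import os

    pidfile_path = "/tmp/example.pid"          # illustrative path
    fd = os.open(pidfile_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
    with os.fdopen(fd, "w") as pidfile:
        pidfile.write("%s\n" % os.getpid())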

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py b/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py
deleted file mode 100644
index f997e24..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py
+++ /dev/null
@@ -1,156 +0,0 @@
-from __future__ import absolute_import, division
-
-import time
-import os
-
-try:
-    unicode
-except NameError:
-    unicode = str
-
-from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked
-
-
-class SQLiteLockFile(LockBase):
-    "Demonstrate SQL-based locking."
-
-    testdb = None
-
-    def __init__(self, path, threaded=True, timeout=None):
-        """
-        >>> lock = SQLiteLockFile('somefile')
-        >>> lock = SQLiteLockFile('somefile', threaded=False)
-        """
-        LockBase.__init__(self, path, threaded, timeout)
-        self.lock_file = unicode(self.lock_file)
-        self.unique_name = unicode(self.unique_name)
-
-        if SQLiteLockFile.testdb is None:
-            import tempfile
-            _fd, testdb = tempfile.mkstemp()
-            os.close(_fd)
-            os.unlink(testdb)
-            del _fd, tempfile
-            SQLiteLockFile.testdb = testdb
-
-        import sqlite3
-        self.connection = sqlite3.connect(SQLiteLockFile.testdb)
-
-        c = self.connection.cursor()
-        try:
-            c.execute("create table locks"
-                      "("
-                      "   lock_file varchar(32),"
-                      "   unique_name varchar(32)"
-                      ")")
-        except sqlite3.OperationalError:
-            pass
-        else:
-            self.connection.commit()
-            import atexit
-            atexit.register(os.unlink, SQLiteLockFile.testdb)
-
-    def acquire(self, timeout=None):
-        timeout = timeout if timeout is not None else self.timeout
-        end_time = time.time()
-        if timeout is not None and timeout > 0:
-            end_time += timeout
-
-        if timeout is None:
-            wait = 0.1
-        elif timeout <= 0:
-            wait = 0
-        else:
-            wait = timeout / 10
-
-        cursor = self.connection.cursor()
-
-        while True:
-            if not self.is_locked():
-                # Not locked.  Try to lock it.
-                cursor.execute("insert into locks"
-                               "  (lock_file, unique_name)"
-                               "  values"
-                               "  (?, ?)",
-                               (self.lock_file, self.unique_name))
-                self.connection.commit()
-
-                # Check to see if we are the only lock holder.
-                cursor.execute("select * from locks"
-                               "  where unique_name = ?",
-                               (self.unique_name,))
-                rows = cursor.fetchall()
-                if len(rows) > 1:
-                    # Nope.  Someone else got there.  Remove our lock.
-                    cursor.execute("delete from locks"
-                                   "  where unique_name = ?",
-                                   (self.unique_name,))
-                    self.connection.commit()
-                else:
-                    # Yup.  We're done, so go home.
-                    return
-            else:
-                # Check to see if we are the only lock holder.
-                cursor.execute("select * from locks"
-                               "  where unique_name = ?",
-                               (self.unique_name,))
-                rows = cursor.fetchall()
-                if len(rows) == 1:
-                    # We're the locker, so go home.
-                    return
-
-            # Maybe we should wait a bit longer.
-            if timeout is not None and time.time() > end_time:
-                if timeout > 0:
-                    # No more waiting.
-                    raise LockTimeout("Timeout waiting to acquire"
-                                      " lock for %s" %
-                                      self.path)
-                else:
-                    # Someone else has the lock and we are impatient..
-                    raise AlreadyLocked("%s is already locked" % self.path)
-
-            # Well, okay.  We'll give it a bit longer.
-            time.sleep(wait)
-
-    def release(self):
-        if not self.is_locked():
-            raise NotLocked("%s is not locked" % self.path)
-        if not self.i_am_locking():
-            raise NotMyLock("%s is locked, but not by me (by %s)" %
-                            (self.unique_name, self._who_is_locking()))
-        cursor = self.connection.cursor()
-        cursor.execute("delete from locks"
-                       "  where unique_name = ?",
-                       (self.unique_name,))
-        self.connection.commit()
-
-    def _who_is_locking(self):
-        cursor = self.connection.cursor()
-        cursor.execute("select unique_name from locks"
-                       "  where lock_file = ?",
-                       (self.lock_file,))
-        return cursor.fetchone()[0]
-
-    def is_locked(self):
-        cursor = self.connection.cursor()
-        cursor.execute("select * from locks"
-                       "  where lock_file = ?",
-                       (self.lock_file,))
-        rows = cursor.fetchall()
-        return not not rows
-
-    def i_am_locking(self):
-        cursor = self.connection.cursor()
-        cursor.execute("select * from locks"
-                       "  where lock_file = ?"
-                       "    and unique_name = ?",
-                       (self.lock_file, self.unique_name))
-        return not not cursor.fetchall()
-
-    def break_lock(self):
-        cursor = self.connection.cursor()
-        cursor.execute("delete from locks"
-                       "  where lock_file = ?",
-                       (self.lock_file,))
-        self.connection.commit()

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py b/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py
deleted file mode 100644
index 23b41f5..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from __future__ import absolute_import
-
-import os
-import time
-
-from . import (LockBase, NotLocked, NotMyLock, LockTimeout,
-               AlreadyLocked)
-
-
-class SymlinkLockFile(LockBase):
-    """Lock access to a file using symlink(2)."""
-
-    def __init__(self, path, threaded=True, timeout=None):
-        # super(SymlinkLockFile).__init(...)
-        LockBase.__init__(self, path, threaded, timeout)
-        # split it back!
-        self.unique_name = os.path.split(self.unique_name)[1]
-
-    def acquire(self, timeout=None):
-        # Hopefully unnecessary for symlink.
-        # try:
-        #     open(self.unique_name, "wb").close()
-        # except IOError:
-        #     raise LockFailed("failed to create %s" % self.unique_name)
-        timeout = timeout if timeout is not None else self.timeout
-        end_time = time.time()
-        if timeout is not None and timeout > 0:
-            end_time += timeout
-
-        while True:
-            # Try and create a symbolic link to it.
-            try:
-                os.symlink(self.unique_name, self.lock_file)
-            except OSError:
-                # Link creation failed.  Maybe we've double-locked?
-                if self.i_am_locking():
-                    # Linked to our unique name. Proceed.
-                    return
-                else:
-                    # Otherwise the lock creation failed.
-                    if timeout is not None and time.time() > end_time:
-                        if timeout > 0:
-                            raise LockTimeout("Timeout waiting to acquire"
-                                              " lock for %s" %
-                                              self.path)
-                        else:
-                            raise AlreadyLocked("%s is already locked" %
-                                                self.path)
-                    time.sleep(timeout / 10 if timeout is not None else 0.1)
-            else:
-                # Link creation succeeded.  We're good to go.
-                return
-
-    def release(self):
-        if not self.is_locked():
-            raise NotLocked("%s is not locked" % self.path)
-        elif not self.i_am_locking():
-            raise NotMyLock("%s is locked, but not by me" % self.path)
-        os.unlink(self.lock_file)
-
-    def is_locked(self):
-        return os.path.islink(self.lock_file)
-
-    def i_am_locking(self):
-        return (os.path.islink(self.lock_file)
-                and os.readlink(self.lock_file) == self.unique_name)
-
-    def break_lock(self):
-        if os.path.islink(self.lock_file):  # exists && link
-            os.unlink(self.lock_file)

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/ordereddict.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/ordereddict.py b/env2/lib/python2.7/site-packages/pip/_vendor/ordereddict.py
deleted file mode 100644
index 7242b50..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/ordereddict.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Copyright (c) 2009 Raymond Hettinger
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation files
-# (the "Software"), to deal in the Software without restriction,
-# including without limitation the rights to use, copy, modify, merge,
-# publish, distribute, sublicense, and/or sell copies of the Software,
-# and to permit persons to whom the Software is furnished to do so,
-# subject to the following conditions:
-#
-#     The above copyright notice and this permission notice shall be
-#     included in all copies or substantial portions of the Software.
-#
-#     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-#     EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-#     OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-#     NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-#     HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-#     WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-#     FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-#     OTHER DEALINGS IN THE SOFTWARE.
-
-from UserDict import DictMixin
-
-class OrderedDict(dict, DictMixin):
-
-    def __init__(self, *args, **kwds):
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        try:
-            self.__end
-        except AttributeError:
-            self.clear()
-        self.update(*args, **kwds)
-
-    def clear(self):
-        self.__end = end = []
-        end += [None, end, end]         # sentinel node for doubly linked list
-        self.__map = {}                 # key --> [key, prev, next]
-        dict.clear(self)
-
-    def __setitem__(self, key, value):
-        if key not in self:
-            end = self.__end
-            curr = end[1]
-            curr[2] = end[1] = self.__map[key] = [key, curr, end]
-        dict.__setitem__(self, key, value)
-
-    def __delitem__(self, key):
-        dict.__delitem__(self, key)
-        key, prev, next = self.__map.pop(key)
-        prev[2] = next
-        next[1] = prev
-
-    def __iter__(self):
-        end = self.__end
-        curr = end[2]
-        while curr is not end:
-            yield curr[0]
-            curr = curr[2]
-
-    def __reversed__(self):
-        end = self.__end
-        curr = end[1]
-        while curr is not end:
-            yield curr[0]
-            curr = curr[1]
-
-    def popitem(self, last=True):
-        if not self:
-            raise KeyError('dictionary is empty')
-        if last:
-            key = reversed(self).next()
-        else:
-            key = iter(self).next()
-        value = self.pop(key)
-        return key, value
-
-    def __reduce__(self):
-        items = [[k, self[k]] for k in self]
-        tmp = self.__map, self.__end
-        del self.__map, self.__end
-        inst_dict = vars(self).copy()
-        self.__map, self.__end = tmp
-        if inst_dict:
-            return (self.__class__, (items,), inst_dict)
-        return self.__class__, (items,)
-
-    def keys(self):
-        return list(self)
-
-    setdefault = DictMixin.setdefault
-    update = DictMixin.update
-    pop = DictMixin.pop
-    values = DictMixin.values
-    items = DictMixin.items
-    iterkeys = DictMixin.iterkeys
-    itervalues = DictMixin.itervalues
-    iteritems = DictMixin.iteritems
-
-    def __repr__(self):
-        if not self:
-            return '%s()' % (self.__class__.__name__,)
-        return '%s(%r)' % (self.__class__.__name__, self.items())
-
-    def copy(self):
-        return self.__class__(self)
-
-    @classmethod
-    def fromkeys(cls, iterable, value=None):
-        d = cls()
-        for key in iterable:
-            d[key] = value
-        return d
-
-    def __eq__(self, other):
-        if isinstance(other, OrderedDict):
-            if len(self) != len(other):
-                return False
-            for p, q in  zip(self.items(), other.items()):
-                if p != q:
-                    return False
-            return True
-        return dict.__eq__(self, other)
-
-    def __ne__(self, other):
-        return not self == other
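
The backport above keeps insertion order with a doubly linked list threaded through self.__map, as the comments in clear() describe. A minimal illustration of the observable behaviour, assuming the class defined above is importable (on Python 2.7+ the stdlib collections.OrderedDict offers the same interface):

    # Order-preserving behaviour of the OrderedDict backport above.
    d = OrderedDict()
    d["b"] = 1
    d["a"] = 2
    d["c"] = 3
    print(d.keys())       # ['b', 'a', 'c'] -- insertion order, not sort order
    print(d.popitem())    # ('c', 3) -- last-inserted item by default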

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py b/env2/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py
deleted file mode 100644
index 95d330e..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-__all__ = [
-    "__title__", "__summary__", "__uri__", "__version__", "__author__",
-    "__email__", "__license__", "__copyright__",
-]
-
-__title__ = "packaging"
-__summary__ = "Core utilities for Python packages"
-__uri__ = "https://github.com/pypa/packaging"
-
-__version__ = "16.8"
-
-__author__ = "Donald Stufft and individual contributors"
-__email__ = "donald@stufft.io"
-
-__license__ = "BSD or Apache License, Version 2.0"
-__copyright__ = "Copyright 2014-2016 %s" % __author__

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.py b/env2/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.py
deleted file mode 100644
index 5ee6220..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-from .__about__ import (
-    __author__, __copyright__, __email__, __license__, __summary__, __title__,
-    __uri__, __version__
-)
-
-__all__ = [
-    "__title__", "__summary__", "__uri__", "__version__", "__author__",
-    "__email__", "__license__", "__copyright__",
-]

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py b/env2/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py
deleted file mode 100644
index 210bb80..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import sys
-
-
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-
-# flake8: noqa
-
-if PY3:
-    string_types = str,
-else:
-    string_types = basestring,
-
-
-def with_metaclass(meta, *bases):
-    """
-    Create a base class with a metaclass.
-    """
-    # This requires a bit of explanation: the basic idea is to make a dummy
-    # metaclass for one level of class instantiation that replaces itself with
-    # the actual metaclass.
-    class metaclass(meta):
-        def __new__(cls, name, this_bases, d):
-            return meta(name, bases, d)
-    return type.__new__(metaclass, 'temporary_class', (), {})
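
with_metaclass() above lets a single class statement carry a metaclass on both Python 2 and 3; packaging's specifiers module (further down in this commit) uses it exactly this way for its abstract base class. A minimal illustration, assuming with_metaclass is in scope:

    # Declaring an abstract base class portably, in the same style as
    # BaseSpecifier(with_metaclass(abc.ABCMeta, object)) in specifiers.py.
    import abc

    class Base(with_metaclass(abc.ABCMeta, object)):
        @abc.abstractmethod
        def run(self):
            """Subclasses must implement run()."""

    class Impl(Base):
        def run(self):
            return "ran"

    print(Impl().run())   # 'ran'; instantiating Base itself raises TypeError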

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py b/env2/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py
deleted file mode 100644
index ccc2786..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-
-class Infinity(object):
-
-    def __repr__(self):
-        return "Infinity"
-
-    def __hash__(self):
-        return hash(repr(self))
-
-    def __lt__(self, other):
-        return False
-
-    def __le__(self, other):
-        return False
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__)
-
-    def __ne__(self, other):
-        return not isinstance(other, self.__class__)
-
-    def __gt__(self, other):
-        return True
-
-    def __ge__(self, other):
-        return True
-
-    def __neg__(self):
-        return NegativeInfinity
-
-Infinity = Infinity()
-
-
-class NegativeInfinity(object):
-
-    def __repr__(self):
-        return "-Infinity"
-
-    def __hash__(self):
-        return hash(repr(self))
-
-    def __lt__(self, other):
-        return True
-
-    def __le__(self, other):
-        return True
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__)
-
-    def __ne__(self, other):
-        return not isinstance(other, self.__class__)
-
-    def __gt__(self, other):
-        return False
-
-    def __ge__(self, other):
-        return False
-
-    def __neg__(self):
-        return Infinity
-
-NegativeInfinity = NegativeInfinity()
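
Infinity and NegativeInfinity above are singleton comparison sentinels: the first compares greater than everything, the second less than everything, and negating one yields the other; packaging's version code uses them when building sort keys. For illustration, assuming both singletons are in scope:

    # Sentinel comparison behaviour of the singletons defined above.
    print(Infinity > 10 ** 9)               # True -- greater than anything
    print(NegativeInfinity < "")            # True -- less than anything
    print(-Infinity is NegativeInfinity)    # True -- negation swaps them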

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/packaging/markers.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/markers.py b/env2/lib/python2.7/site-packages/pip/_vendor/packaging/markers.py
deleted file mode 100644
index f9ca1ff..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/markers.py
+++ /dev/null
@@ -1,303 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import operator
-import os
-import platform
-import sys
-
-from pip._vendor.pyparsing import (
-    ParseException, ParseResults, stringStart, stringEnd,
-)
-from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString
-from pip._vendor.pyparsing import Literal as L  # noqa
-
-from ._compat import string_types
-from .specifiers import Specifier, InvalidSpecifier
-
-
-__all__ = [
-    "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
-    "Marker", "default_environment",
-]
-
-
-class InvalidMarker(ValueError):
-    """
-    An invalid marker was found, users should refer to PEP 508.
-    """
-
-
-class UndefinedComparison(ValueError):
-    """
-    An invalid operation was attempted on a value that doesn't support it.
-    """
-
-
-class UndefinedEnvironmentName(ValueError):
-    """
-    A name was attempted to be used that does not exist inside of the
-    environment.
-    """
-
-
-class Node(object):
-
-    def __init__(self, value):
-        self.value = value
-
-    def __str__(self):
-        return str(self.value)
-
-    def __repr__(self):
-        return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
-
-    def serialize(self):
-        raise NotImplementedError
-
-
-class Variable(Node):
-
-    def serialize(self):
-        return str(self)
-
-
-class Value(Node):
-
-    def serialize(self):
-        return '"{0}"'.format(self)
-
-
-class Op(Node):
-
-    def serialize(self):
-        return str(self)
-
-
-VARIABLE = (
-    L("implementation_version") |
-    L("platform_python_implementation") |
-    L("implementation_name") |
-    L("python_full_version") |
-    L("platform_release") |
-    L("platform_version") |
-    L("platform_machine") |
-    L("platform_system") |
-    L("python_version") |
-    L("sys_platform") |
-    L("os_name") |
-    L("os.name") |  # PEP-345
-    L("sys.platform") |  # PEP-345
-    L("platform.version") |  # PEP-345
-    L("platform.machine") |  # PEP-345
-    L("platform.python_implementation") |  # PEP-345
-    L("python_implementation") |  # undocumented setuptools legacy
-    L("extra")
-)
-ALIASES = {
-    'os.name': 'os_name',
-    'sys.platform': 'sys_platform',
-    'platform.version': 'platform_version',
-    'platform.machine': 'platform_machine',
-    'platform.python_implementation': 'platform_python_implementation',
-    'python_implementation': 'platform_python_implementation'
-}
-VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
-
-VERSION_CMP = (
-    L("===") |
-    L("==") |
-    L(">=") |
-    L("<=") |
-    L("!=") |
-    L("~=") |
-    L(">") |
-    L("<")
-)
-
-MARKER_OP = VERSION_CMP | L("not in") | L("in")
-MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
-
-MARKER_VALUE = QuotedString("'") | QuotedString('"')
-MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
-
-BOOLOP = L("and") | L("or")
-
-MARKER_VAR = VARIABLE | MARKER_VALUE
-
-MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
-MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
-
-LPAREN = L("(").suppress()
-RPAREN = L(")").suppress()
-
-MARKER_EXPR = Forward()
-MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
-MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
-
-MARKER = stringStart + MARKER_EXPR + stringEnd
-
-
-def _coerce_parse_result(results):
-    if isinstance(results, ParseResults):
-        return [_coerce_parse_result(i) for i in results]
-    else:
-        return results
-
-
-def _format_marker(marker, first=True):
-    assert isinstance(marker, (list, tuple, string_types))
-
-    # Sometimes we have a structure like [[...]] which is a single item list
-    # where the single item is itself its own list. In that case we want to skip
-    # the rest of this function so that we don't get extraneous () on the
-    # outside.
-    if (isinstance(marker, list) and len(marker) == 1 and
-            isinstance(marker[0], (list, tuple))):
-        return _format_marker(marker[0])
-
-    if isinstance(marker, list):
-        inner = (_format_marker(m, first=False) for m in marker)
-        if first:
-            return " ".join(inner)
-        else:
-            return "(" + " ".join(inner) + ")"
-    elif isinstance(marker, tuple):
-        return " ".join([m.serialize() for m in marker])
-    else:
-        return marker
-
-
-_operators = {
-    "in": lambda lhs, rhs: lhs in rhs,
-    "not in": lambda lhs, rhs: lhs not in rhs,
-    "<": operator.lt,
-    "<=": operator.le,
-    "==": operator.eq,
-    "!=": operator.ne,
-    ">=": operator.ge,
-    ">": operator.gt,
-}
-
-
-def _eval_op(lhs, op, rhs):
-    try:
-        spec = Specifier("".join([op.serialize(), rhs]))
-    except InvalidSpecifier:
-        pass
-    else:
-        return spec.contains(lhs)
-
-    oper = _operators.get(op.serialize())
-    if oper is None:
-        raise UndefinedComparison(
-            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
-        )
-
-    return oper(lhs, rhs)
-
-
-_undefined = object()
-
-
-def _get_env(environment, name):
-    value = environment.get(name, _undefined)
-
-    if value is _undefined:
-        raise UndefinedEnvironmentName(
-            "{0!r} does not exist in evaluation environment.".format(name)
-        )
-
-    return value
-
-
-def _evaluate_markers(markers, environment):
-    groups = [[]]
-
-    for marker in markers:
-        assert isinstance(marker, (list, tuple, string_types))
-
-        if isinstance(marker, list):
-            groups[-1].append(_evaluate_markers(marker, environment))
-        elif isinstance(marker, tuple):
-            lhs, op, rhs = marker
-
-            if isinstance(lhs, Variable):
-                lhs_value = _get_env(environment, lhs.value)
-                rhs_value = rhs.value
-            else:
-                lhs_value = lhs.value
-                rhs_value = _get_env(environment, rhs.value)
-
-            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
-        else:
-            assert marker in ["and", "or"]
-            if marker == "or":
-                groups.append([])
-
-    return any(all(item) for item in groups)
-
-
-def format_full_version(info):
-    version = '{0.major}.{0.minor}.{0.micro}'.format(info)
-    kind = info.releaselevel
-    if kind != 'final':
-        version += kind[0] + str(info.serial)
-    return version
-
-
-def default_environment():
-    if hasattr(sys, 'implementation'):
-        iver = format_full_version(sys.implementation.version)
-        implementation_name = sys.implementation.name
-    else:
-        iver = '0'
-        implementation_name = ''
-
-    return {
-        "implementation_name": implementation_name,
-        "implementation_version": iver,
-        "os_name": os.name,
-        "platform_machine": platform.machine(),
-        "platform_release": platform.release(),
-        "platform_system": platform.system(),
-        "platform_version": platform.version(),
-        "python_full_version": platform.python_version(),
-        "platform_python_implementation": platform.python_implementation(),
-        "python_version": platform.python_version()[:3],
-        "sys_platform": sys.platform,
-    }
-
-
-class Marker(object):
-
-    def __init__(self, marker):
-        try:
-            self._markers = _coerce_parse_result(MARKER.parseString(marker))
-        except ParseException as e:
-            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
-                marker, marker[e.loc:e.loc + 8])
-            raise InvalidMarker(err_str)
-
-    def __str__(self):
-        return _format_marker(self._markers)
-
-    def __repr__(self):
-        return "<Marker({0!r})>".format(str(self))
-
-    def evaluate(self, environment=None):
-        """Evaluate a marker.
-
-        Return the boolean from evaluating the given marker against the
-        environment. environment is an optional argument to override all or
-        part of the determined environment.
-
-        The environment is determined from the current Python process.
-        """
-        current_environment = default_environment()
-        if environment is not None:
-            current_environment.update(environment)
-
-        return _evaluate_markers(self._markers, current_environment)
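
A small usage sketch of the Marker class deleted above, following the evaluate() docstring; the import path assumes the standalone packaging distribution rather than pip's vendored copy:

    # Parse a PEP 508 environment marker and evaluate it, optionally
    # overriding part of the detected environment (per evaluate() above).
    from packaging.markers import Marker, InvalidMarker

    m = Marker("python_version >= '2.7' and os_name == 'posix'")
    print(m.evaluate())                     # against the running interpreter
    print(m.evaluate({"os_name": "nt"}))    # with part of the environment overridden

    try:
        Marker("this is not a marker")
    except InvalidMarker as exc:
        print(exc)                          # parse errors surface as InvalidMarker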

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.py b/env2/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.py
deleted file mode 100644
index 49a4385..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/requirements.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import string
-import re
-
-from pip._vendor.pyparsing import (
-    stringStart, stringEnd, originalTextFor, ParseException
-)
-from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
-from pip._vendor.pyparsing import Literal as L  # noqa
-from pip._vendor.six.moves.urllib import parse as urlparse
-
-from .markers import MARKER_EXPR, Marker
-from .specifiers import LegacySpecifier, Specifier, SpecifierSet
-
-
-class InvalidRequirement(ValueError):
-    """
-    An invalid requirement was found, users should refer to PEP 508.
-    """
-
-
-ALPHANUM = Word(string.ascii_letters + string.digits)
-
-LBRACKET = L("[").suppress()
-RBRACKET = L("]").suppress()
-LPAREN = L("(").suppress()
-RPAREN = L(")").suppress()
-COMMA = L(",").suppress()
-SEMICOLON = L(";").suppress()
-AT = L("@").suppress()
-
-PUNCTUATION = Word("-_.")
-IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
-IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
-
-NAME = IDENTIFIER("name")
-EXTRA = IDENTIFIER
-
-URI = Regex(r'[^ ]+')("url")
-URL = (AT + URI)
-
-EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
-EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
-
-VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
-VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
-
-VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
-VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
-                       joinString=",", adjacent=False)("_raw_spec")
-_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
-_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
-
-VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
-VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
-
-MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
-MARKER_EXPR.setParseAction(
-    lambda s, l, t: Marker(s[t._original_start:t._original_end])
-)
-MARKER_SEPERATOR = SEMICOLON
-MARKER = MARKER_SEPERATOR + MARKER_EXPR
-
-VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
-URL_AND_MARKER = URL + Optional(MARKER)
-
-NAMED_REQUIREMENT = \
-    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
-
-REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
-
-
-class Requirement(object):
-    """Parse a requirement.
-
-    Parse a given requirement string into its parts, such as name, specifier,
-    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
-    string.
-    """
-
-    # TODO: Can we test whether something is contained within a requirement?
-    #       If so how do we do that? Do we need to test against the _name_ of
-    #       the thing as well as the version? What about the markers?
-    # TODO: Can we normalize the name and extra name?
-
-    def __init__(self, requirement_string):
-        try:
-            req = REQUIREMENT.parseString(requirement_string)
-        except ParseException as e:
-            raise InvalidRequirement(
-                "Invalid requirement, parse error at \"{0!r}\"".format(
-                    requirement_string[e.loc:e.loc + 8]))
-
-        self.name = req.name
-        if req.url:
-            parsed_url = urlparse.urlparse(req.url)
-            if not (parsed_url.scheme and parsed_url.netloc) or (
-                    not parsed_url.scheme and not parsed_url.netloc):
-                raise InvalidRequirement("Invalid URL given")
-            self.url = req.url
-        else:
-            self.url = None
-        self.extras = set(req.extras.asList() if req.extras else [])
-        self.specifier = SpecifierSet(req.specifier)
-        self.marker = req.marker if req.marker else None
-
-    def __str__(self):
-        parts = [self.name]
-
-        if self.extras:
-            parts.append("[{0}]".format(",".join(sorted(self.extras))))
-
-        if self.specifier:
-            parts.append(str(self.specifier))
-
-        if self.url:
-            parts.append("@ {0}".format(self.url))
-
-        if self.marker:
-            parts.append("; {0}".format(self.marker))
-
-        return "".join(parts)
-
-    def __repr__(self):
-        return "<Requirement({0!r})>".format(str(self))
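
For reference, here is a minimal usage sketch of the Requirement class deleted
above. The import path assumes pip's vendored copy of packaging, and the
requirement string and printed values are purely illustrative:

    from __future__ import print_function

    from pip._vendor.packaging.requirements import InvalidRequirement, Requirement

    # A PEP 508 style requirement string split into its parts.
    req = Requirement("requests[security]>=2.8.1,==2.8.*; python_version < '2.7'")
    print(req.name)            # requests
    print(sorted(req.extras))  # ['security']
    print(req.specifier)       # ==2.8.*,>=2.8.1
    print(req.marker)          # e.g. python_version < "2.7"

    # Strings that do not parse raise InvalidRequirement rather than a raw
    # pyparsing ParseException.
    try:
        Requirement("just a bunch of words")
    except InvalidRequirement as exc:
        print(exc)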

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py b/env2/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py
deleted file mode 100644
index 7f5a76c..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py
+++ /dev/null
@@ -1,774 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import abc
-import functools
-import itertools
-import re
-
-from ._compat import string_types, with_metaclass
-from .version import Version, LegacyVersion, parse
-
-
-class InvalidSpecifier(ValueError):
-    """
-    An invalid specifier was found; users should refer to PEP 440.
-    """
-
-
-class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
-
-    @abc.abstractmethod
-    def __str__(self):
-        """
-        Returns the str representation of this Specifier-like object. This
-        should be representative of the Specifier itself.
-        """
-
-    @abc.abstractmethod
-    def __hash__(self):
-        """
-        Returns a hash value for this Specifier-like object.
-        """
-
-    @abc.abstractmethod
-    def __eq__(self, other):
-        """
-        Returns a boolean representing whether or not the two Specifier-like
-        objects are equal.
-        """
-
-    @abc.abstractmethod
-    def __ne__(self, other):
-        """
-        Returns a boolean representing whether or not the two Specifier-like
-        objects are not equal.
-        """
-
-    @abc.abstractproperty
-    def prereleases(self):
-        """
-        Returns whether or not pre-releases as a whole are allowed by this
-        specifier.
-        """
-
-    @prereleases.setter
-    def prereleases(self, value):
-        """
-        Sets whether or not pre-releases as a whole are allowed by this
-        specifier.
-        """
-
-    @abc.abstractmethod
-    def contains(self, item, prereleases=None):
-        """
-        Determines if the given item is contained within this specifier.
-        """
-
-    @abc.abstractmethod
-    def filter(self, iterable, prereleases=None):
-        """
-        Takes an iterable of items and filters them so that only items which
-        are contained within this specifier are allowed in it.
-        """
-
-
-class _IndividualSpecifier(BaseSpecifier):
-
-    _operators = {}
-
-    def __init__(self, spec="", prereleases=None):
-        match = self._regex.search(spec)
-        if not match:
-            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
-
-        self._spec = (
-            match.group("operator").strip(),
-            match.group("version").strip(),
-        )
-
-        # Store whether or not this Specifier should accept prereleases
-        self._prereleases = prereleases
-
-    def __repr__(self):
-        pre = (
-            ", prereleases={0!r}".format(self.prereleases)
-            if self._prereleases is not None
-            else ""
-        )
-
-        return "<{0}({1!r}{2})>".format(
-            self.__class__.__name__,
-            str(self),
-            pre,
-        )
-
-    def __str__(self):
-        return "{0}{1}".format(*self._spec)
-
-    def __hash__(self):
-        return hash(self._spec)
-
-    def __eq__(self, other):
-        if isinstance(other, string_types):
-            try:
-                other = self.__class__(other)
-            except InvalidSpecifier:
-                return NotImplemented
-        elif not isinstance(other, self.__class__):
-            return NotImplemented
-
-        return self._spec == other._spec
-
-    def __ne__(self, other):
-        if isinstance(other, string_types):
-            try:
-                other = self.__class__(other)
-            except InvalidSpecifier:
-                return NotImplemented
-        elif not isinstance(other, self.__class__):
-            return NotImplemented
-
-        return self._spec != other._spec
-
-    def _get_operator(self, op):
-        return getattr(self, "_compare_{0}".format(self._operators[op]))
-
-    def _coerce_version(self, version):
-        if not isinstance(version, (LegacyVersion, Version)):
-            version = parse(version)
-        return version
-
-    @property
-    def operator(self):
-        return self._spec[0]
-
-    @property
-    def version(self):
-        return self._spec[1]
-
-    @property
-    def prereleases(self):
-        return self._prereleases
-
-    @prereleases.setter
-    def prereleases(self, value):
-        self._prereleases = value
-
-    def __contains__(self, item):
-        return self.contains(item)
-
-    def contains(self, item, prereleases=None):
-        # Determine if prereleases are to be allowed or not.
-        if prereleases is None:
-            prereleases = self.prereleases
-
-        # Normalize item to a Version or LegacyVersion; this allows us to have
-        # a shortcut for ``"2.0" in Specifier(">=2")``.
-        item = self._coerce_version(item)
-
-        # Determine if we should be supporting prereleases in this specifier
-        # or not; if we do not support prereleases, then we can short-circuit
-        # the logic if this version is a prerelease.
-        if item.is_prerelease and not prereleases:
-            return False
-
-        # Actually do the comparison to determine if this item is contained
-        # within this Specifier or not.
-        return self._get_operator(self.operator)(item, self.version)
-
-    def filter(self, iterable, prereleases=None):
-        yielded = False
-        found_prereleases = []
-
-        kw = {"prereleases": prereleases if prereleases is not None else True}
-
-        # Attempt to iterate over all the values in the iterable and if any of
-        # them match, yield them.
-        for version in iterable:
-            parsed_version = self._coerce_version(version)
-
-            if self.contains(parsed_version, **kw):
-                # If our version is a prerelease, and we were not set to allow
-                # prereleases, then we'll store it for later in case nothing
-                # else matches this specifier.
-                if (parsed_version.is_prerelease and not
-                        (prereleases or self.prereleases)):
-                    found_prereleases.append(version)
-                # Either this is not a prerelease, or we should have been
-                # accepting prereleases from the beginning.
-                else:
-                    yielded = True
-                    yield version
-
-        # Now that we've iterated over everything, determine if we've yielded
-        # any values, and if we have not and we have any prereleases stored up
-        # then we will go ahead and yield the prereleases.
-        if not yielded and found_prereleases:
-            for version in found_prereleases:
-                yield version
-
-
-class LegacySpecifier(_IndividualSpecifier):
-
-    _regex_str = (
-        r"""
-        (?P<operator>(==|!=|<=|>=|<|>))
-        \s*
-        (?P<version>
-            [^,;\s)]* # Since this is a "legacy" specifier, and the version
-                      # string can be just about anything, we match everything
-                      # except for whitespace, a semi-colon for marker support,
-                      # a closing paren since versions can be enclosed in
-                      # them, and a comma since it's a version separator.
-        )
-        """
-    )
-
-    _regex = re.compile(
-        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
-    _operators = {
-        "==": "equal",
-        "!=": "not_equal",
-        "<=": "less_than_equal",
-        ">=": "greater_than_equal",
-        "<": "less_than",
-        ">": "greater_than",
-    }
-
-    def _coerce_version(self, version):
-        if not isinstance(version, LegacyVersion):
-            version = LegacyVersion(str(version))
-        return version
-
-    def _compare_equal(self, prospective, spec):
-        return prospective == self._coerce_version(spec)
-
-    def _compare_not_equal(self, prospective, spec):
-        return prospective != self._coerce_version(spec)
-
-    def _compare_less_than_equal(self, prospective, spec):
-        return prospective <= self._coerce_version(spec)
-
-    def _compare_greater_than_equal(self, prospective, spec):
-        return prospective >= self._coerce_version(spec)
-
-    def _compare_less_than(self, prospective, spec):
-        return prospective < self._coerce_version(spec)
-
-    def _compare_greater_than(self, prospective, spec):
-        return prospective > self._coerce_version(spec)
-
-
-def _require_version_compare(fn):
-    @functools.wraps(fn)
-    def wrapped(self, prospective, spec):
-        if not isinstance(prospective, Version):
-            return False
-        return fn(self, prospective, spec)
-    return wrapped
-
-
-class Specifier(_IndividualSpecifier):
-
-    _regex_str = (
-        r"""
-        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
-        (?P<version>
-            (?:
-                # The identity operators allow for an escape hatch that will
-                # do an exact string match of the version you wish to install.
-                # This will not be parsed by PEP 440 and we cannot determine
-                # any semantic meaning from it. This operator is discouraged
-                # but included entirely as an escape hatch.
-                (?<====)  # Only match for the identity operator
-                \s*
-                [^\s]*    # We just match everything, except for whitespace
-                          # since we are only testing for strict identity.
-            )
-            |
-            (?:
-                # The (non)equality operators allow for wild card and local
-                # versions to be specified so we have to define these two
-                # operators separately to enable that.
-                (?<===|!=)            # Only match for equals and not equals
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)*   # release
-                (?:                   # pre release
-                    [-_\.]?
-                    (a|b|c|rc|alpha|beta|pre|preview)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:                   # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-
-                # You cannot use a wild card and a dev or local version
-                # together so group them with a | and make them optional.
-                (?:
-                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
-                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
-                    |
-                    \.\*  # Wild card syntax of .*
-                )?
-            )
-            |
-            (?:
-                # The compatible operator requires at least two digits in the
-                # release segment.
-                (?<=~=)               # Only match for the compatible operator
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
-                (?:                   # pre release
-                    [-_\.]?
-                    (a|b|c|rc|alpha|beta|pre|preview)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:                                   # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
-            )
-            |
-            (?:
-                # All other operators only allow a subset of what the
-                # (non)equality operators do. Specifically they do not allow
-                # local versions to be specified nor do they allow the prefix
-                # matching wild cards.
-                (?<!==|!=|~=)         # We have special cases for these
-                                      # operators so we want to make sure they
-                                      # don't match here.
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)*   # release
-                (?:                   # pre release
-                    [-_\.]?
-                    (a|b|c|rc|alpha|beta|pre|preview)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:                                   # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
-            )
-        )
-        """
-    )
-
-    _regex = re.compile(
-        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
-    _operators = {
-        "~=": "compatible",
-        "==": "equal",
-        "!=": "not_equal",
-        "<=": "less_than_equal",
-        ">=": "greater_than_equal",
-        "<": "less_than",
-        ">": "greater_than",
-        "===": "arbitrary",
-    }
-
-    @_require_version_compare
-    def _compare_compatible(self, prospective, spec):
-        # Compatible releases have an equivalent combination of >= and ==. That
-        # is, ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
-        # implement this in terms of the other specifiers instead of
-        # implementing it ourselves. The only thing we need to do is construct
-        # the other specifiers.
-
-        # We want everything but the last item in the version, but we want to
-        # ignore post and dev releases and we want to treat the pre-release as
-        # its own separate segment.
-        prefix = ".".join(
-            list(
-                itertools.takewhile(
-                    lambda x: (not x.startswith("post") and not
-                               x.startswith("dev")),
-                    _version_split(spec),
-                )
-            )[:-1]
-        )
-
-        # Add the prefix notation to the end of our string
-        prefix += ".*"
-
-        return (self._get_operator(">=")(prospective, spec) and
-                self._get_operator("==")(prospective, prefix))
-
-    @_require_version_compare
-    def _compare_equal(self, prospective, spec):
-        # We need special logic to handle prefix matching
-        if spec.endswith(".*"):
-            # For prefix matching we want to ignore the local segment.
-            prospective = Version(prospective.public)
-            # Split the spec out by dots, and pretend that there is an implicit
-            # dot in between a release segment and a pre-release segment.
-            spec = _version_split(spec[:-2])  # Remove the trailing .*
-
-            # Split the prospective version out by dots, and pretend that there
-            # is an implicit dot in between a release segment and a pre-release
-            # segment.
-            prospective = _version_split(str(prospective))
-
-            # Shorten the prospective version to be the same length as the spec
-            # so that we can determine if the specifier is a prefix of the
-            # prospective version or not.
-            prospective = prospective[:len(spec)]
-
-            # Pad out our two sides with zeros so that they both equal the same
-            # length.
-            spec, prospective = _pad_version(spec, prospective)
-        else:
-            # Convert our spec string into a Version
-            spec = Version(spec)
-
-            # If the specifier does not have a local segment, then we want to
-            # act as if the prospective version also does not have a local
-            # segment.
-            if not spec.local:
-                prospective = Version(prospective.public)
-
-        return prospective == spec
-
-    @_require_version_compare
-    def _compare_not_equal(self, prospective, spec):
-        return not self._compare_equal(prospective, spec)
-
-    @_require_version_compare
-    def _compare_less_than_equal(self, prospective, spec):
-        return prospective <= Version(spec)
-
-    @_require_version_compare
-    def _compare_greater_than_equal(self, prospective, spec):
-        return prospective >= Version(spec)
-
-    @_require_version_compare
-    def _compare_less_than(self, prospective, spec):
-        # Convert our spec to a Version instance, since we'll want to work with
-        # it as a version.
-        spec = Version(spec)
-
-        # Check to see if the prospective version is less than the spec
-        # version. If it's not we can short circuit and just return False now
-        # instead of doing extra unneeded work.
-        if not prospective < spec:
-            return False
-
-        # This special case is here so that, unless the specifier itself is a
-        # pre-release version, we do not accept pre-release versions for the
-        # version mentioned in the specifier (e.g. <3.1 should not match
-        # 3.1.dev0, but should match 3.0.dev0).
-        if not spec.is_prerelease and prospective.is_prerelease:
-            if Version(prospective.base_version) == Version(spec.base_version):
-                return False
-
-        # If we've gotten to here, it means that the prospective version is
-        # both less than the spec version *and* not a pre-release of the same
-        # version in the spec.
-        return True
-
-    @_require_version_compare
-    def _compare_greater_than(self, prospective, spec):
-        # Convert our spec to a Version instance, since we'll want to work with
-        # it as a version.
-        spec = Version(spec)
-
-        # Check to see if the prospective version is greater than the spec
-        # version. If it's not we can short circuit and just return False now
-        # instead of doing extra unneeded work.
-        if not prospective > spec:
-            return False
-
-        # This special case is here so that, unless the specifier itself is a
-        # post-release version, we do not accept post-release versions for the
-        # version mentioned in the specifier (e.g. >3.1 should not match
-        # 3.0.post0, but should match 3.2.post0).
-        if not spec.is_postrelease and prospective.is_postrelease:
-            if Version(prospective.base_version) == Version(spec.base_version):
-                return False
-
-        # Ensure that we do not allow a local version of the version mentioned
-        # in the specifier, which is technically greater than, to match.
-        if prospective.local is not None:
-            if Version(prospective.base_version) == Version(spec.base_version):
-                return False
-
-        # If we've gotten to here, it means that the prospective version is
-        # both greater than the spec version *and* not a pre-release of the
-        # same version in the spec.
-        return True
-
-    def _compare_arbitrary(self, prospective, spec):
-        return str(prospective).lower() == str(spec).lower()
-
-    @property
-    def prereleases(self):
-        # If there is an explicit prereleases set for this, then we'll just
-        # blindly use that.
-        if self._prereleases is not None:
-            return self._prereleases
-
-        # Look at all of our specifiers and determine if they are inclusive
-        # operators, and, if they are, whether they include an explicit
-        # prerelease.
-        operator, version = self._spec
-        if operator in ["==", ">=", "<=", "~=", "==="]:
-            # The == specifier can include a trailing .*; if it does, we
-            # want to remove it before parsing.
-            if operator == "==" and version.endswith(".*"):
-                version = version[:-2]
-
-            # Parse the version, and if it is a pre-release then this
-            # specifier allows pre-releases.
-            if parse(version).is_prerelease:
-                return True
-
-        return False
-
-    @prereleases.setter
-    def prereleases(self, value):
-        self._prereleases = value
-
-
-_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
-
-
-def _version_split(version):
-    result = []
-    for item in version.split("."):
-        match = _prefix_regex.search(item)
-        if match:
-            result.extend(match.groups())
-        else:
-            result.append(item)
-    return result
-
-
-def _pad_version(left, right):
-    left_split, right_split = [], []
-
-    # Get the release segment of our versions
-    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
-    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
-
-    # Get the rest of our versions
-    left_split.append(left[len(left_split[0]):])
-    right_split.append(right[len(right_split[0]):])
-
-    # Insert our padding
-    left_split.insert(
-        1,
-        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
-    )
-    right_split.insert(
-        1,
-        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
-    )
-
-    return (
-        list(itertools.chain(*left_split)),
-        list(itertools.chain(*right_split)),
-    )
-
-
-class SpecifierSet(BaseSpecifier):
-
-    def __init__(self, specifiers="", prereleases=None):
-        # Split on , to break each individual specifier into its own item, and
-        # strip each item to remove leading/trailing whitespace.
-        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
-
-        # Parse each individual specifier, attempting first to make it a
-        # Specifier and falling back to a LegacySpecifier.
-        parsed = set()
-        for specifier in specifiers:
-            try:
-                parsed.add(Specifier(specifier))
-            except InvalidSpecifier:
-                parsed.add(LegacySpecifier(specifier))
-
-        # Turn our parsed specifiers into a frozen set and save them for later.
-        self._specs = frozenset(parsed)
-
-        # Store our prereleases value so we can use it later to determine if
-        # we accept prereleases or not.
-        self._prereleases = prereleases
-
-    def __repr__(self):
-        pre = (
-            ", prereleases={0!r}".format(self.prereleases)
-            if self._prereleases is not None
-            else ""
-        )
-
-        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
-
-    def __str__(self):
-        return ",".join(sorted(str(s) for s in self._specs))
-
-    def __hash__(self):
-        return hash(self._specs)
-
-    def __and__(self, other):
-        if isinstance(other, string_types):
-            other = SpecifierSet(other)
-        elif not isinstance(other, SpecifierSet):
-            return NotImplemented
-
-        specifier = SpecifierSet()
-        specifier._specs = frozenset(self._specs | other._specs)
-
-        if self._prereleases is None and other._prereleases is not None:
-            specifier._prereleases = other._prereleases
-        elif self._prereleases is not None and other._prereleases is None:
-            specifier._prereleases = self._prereleases
-        elif self._prereleases == other._prereleases:
-            specifier._prereleases = self._prereleases
-        else:
-            raise ValueError(
-                "Cannot combine SpecifierSets with True and False prerelease "
-                "overrides."
-            )
-
-        return specifier
-
-    def __eq__(self, other):
-        if isinstance(other, string_types):
-            other = SpecifierSet(other)
-        elif isinstance(other, _IndividualSpecifier):
-            other = SpecifierSet(str(other))
-        elif not isinstance(other, SpecifierSet):
-            return NotImplemented
-
-        return self._specs == other._specs
-
-    def __ne__(self, other):
-        if isinstance(other, string_types):
-            other = SpecifierSet(other)
-        elif isinstance(other, _IndividualSpecifier):
-            other = SpecifierSet(str(other))
-        elif not isinstance(other, SpecifierSet):
-            return NotImplemented
-
-        return self._specs != other._specs
-
-    def __len__(self):
-        return len(self._specs)
-
-    def __iter__(self):
-        return iter(self._specs)
-
-    @property
-    def prereleases(self):
-        # If we have been given an explicit prerelease modifier, then we'll
-        # pass that through here.
-        if self._prereleases is not None:
-            return self._prereleases
-
-        # If we don't have any specifiers, and we don't have a forced value,
-        # then we'll just return None since we don't know if this should have
-        # pre-releases or not.
-        if not self._specs:
-            return None
-
-        # Otherwise we'll see if any of the given specifiers accept
-        # prereleases, if any of them do we'll return True, otherwise False.
-        return any(s.prereleases for s in self._specs)
-
-    @prereleases.setter
-    def prereleases(self, value):
-        self._prereleases = value
-
-    def __contains__(self, item):
-        return self.contains(item)
-
-    def contains(self, item, prereleases=None):
-        # Ensure that our item is a Version or LegacyVersion instance.
-        if not isinstance(item, (LegacyVersion, Version)):
-            item = parse(item)
-
-        # Determine if we're forcing a prerelease or not; if we're not forcing
-        # one for this particular filter call, then we'll use whatever the
-        # SpecifierSet thinks for whether or not we should support prereleases.
-        if prereleases is None:
-            prereleases = self.prereleases
-
-        # We can determine if we're going to allow pre-releases by looking to
-        # see if any of the underlying items supports them. If none of them do
-        # and this item is a pre-release then we do not allow it and we can
-        # short circuit that here.
-        # Note: This means that 1.0.dev1 would not be contained in something
-        #       like >=1.0.devabc; however, it would be in >=1.0.devabc,>0.0.dev0
-        if not prereleases and item.is_prerelease:
-            return False
-
-        # We simply dispatch to the underlying specs here to make sure that the
-        # given version is contained within all of them.
-        # Note: This use of all() here means that an empty set of specifiers
-        #       will always return True; this is an explicit design decision.
-        return all(
-            s.contains(item, prereleases=prereleases)
-            for s in self._specs
-        )
-
-    def filter(self, iterable, prereleases=None):
-        # Determine if we're forcing a prerelease or not; if we're not forcing
-        # one for this particular filter call, then we'll use whatever the
-        # SpecifierSet thinks for whether or not we should support prereleases.
-        if prereleases is None:
-            prereleases = self.prereleases
-
-        # If we have any specifiers, then we want to wrap our iterable in the
-        # filter method for each one; this will act as a logical AND across
-        # all of the specifiers.
-        if self._specs:
-            for spec in self._specs:
-                iterable = spec.filter(iterable, prereleases=bool(prereleases))
-            return iterable
-        # If we do not have any specifiers, then we need to have a rough filter
-        # which will filter out any pre-releases, unless there are no final
-        # releases, and which will filter out LegacyVersion in general.
-        else:
-            filtered = []
-            found_prereleases = []
-
-            for item in iterable:
-                # Ensure that we have some kind of Version class for this item.
-                if not isinstance(item, (LegacyVersion, Version)):
-                    parsed_version = parse(item)
-                else:
-                    parsed_version = item
-
-                # Filter out any item which is parsed as a LegacyVersion
-                if isinstance(parsed_version, LegacyVersion):
-                    continue
-
-                # Store any item which is a pre-release for later unless we've
-                # already found a final version or we are accepting prereleases
-                if parsed_version.is_prerelease and not prereleases:
-                    if not filtered:
-                        found_prereleases.append(item)
-                else:
-                    filtered.append(item)
-
-            # If we've found no items except for pre-releases, then we'll go
-            # ahead and use the pre-releases
-            if not filtered and found_prereleases and prereleases is None:
-                return found_prereleases
-
-            return filtered
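
A small usage sketch of the specifier classes deleted above, again assuming
the vendored import paths; the version strings and the results shown in the
comments are illustrative rather than verified doctest output:

    from __future__ import print_function

    from pip._vendor.packaging.specifiers import Specifier, SpecifierSet
    from pip._vendor.packaging.version import Version

    spec_set = SpecifierSet(">=1.0,<2.0")

    # Membership accepts version strings or Version objects.
    print("1.4" in spec_set)           # True
    print(Version("2.1") in spec_set)  # False

    # Pre-releases are rejected unless explicitly allowed.
    print(spec_set.contains("1.5.dev1"))                    # False
    print(spec_set.contains("1.5.dev1", prereleases=True))  # True

    # filter() keeps only the versions matched by every specifier in the set.
    print(list(spec_set.filter(["0.9", "1.0", "1.5", "2.0"])))  # ['1.0', '1.5']

    # The compatible-release operator implemented by _compare_compatible:
    # ~=2.2 behaves like ">=2.2,==2.*".
    print(Specifier("~=2.2").contains("2.3"))  # True
    print(Specifier("~=2.2").contains("3.0"))  # False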

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/packaging/utils.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/utils.py b/env2/lib/python2.7/site-packages/pip/_vendor/packaging/utils.py
deleted file mode 100644
index 942387c..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/packaging/utils.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-from __future__ import absolute_import, division, print_function
-
-import re
-
-
-_canonicalize_regex = re.compile(r"[-_.]+")
-
-
-def canonicalize_name(name):
-    # This is taken from PEP 503.
-    return _canonicalize_regex.sub("-", name).lower()
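
Finally, a quick sketch of the canonicalize_name helper deleted above (same
vendored import path assumption; the example names are illustrative):

    from __future__ import print_function

    from pip._vendor.packaging.utils import canonicalize_name

    # PEP 503 normalization: runs of "-", "_" and "." collapse to a single
    # "-" and the result is lower-cased.
    print(canonicalize_name("Django"))          # django
    print(canonicalize_name("zope.interface"))  # zope-interface
    print(canonicalize_name("Flask__RESTful"))  # flask-restful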