Posted to commits@hawq.apache.org by rl...@apache.org on 2016/01/06 09:56:14 UTC

[13/13] incubator-hawq git commit: HAWQ-271. Remove external python modules.

HAWQ-271. Remove external python modules.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/0672292f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/0672292f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/0672292f

Branch: refs/heads/master
Commit: 0672292f31196287be9538b5fb9420ec854f3d5f
Parents: 7e0ff41
Author: rlei <rl...@pivotal.io>
Authored: Wed Jan 6 10:20:14 2016 +0800
Committer: rlei <rl...@pivotal.io>
Committed: Wed Jan 6 16:19:21 2016 +0800

----------------------------------------------------------------------
 tools/bin/Makefile                              |  126 +-
 tools/bin/ext/Makefile                          |   27 -
 tools/bin/ext/__init__.py                       |  306 --
 tools/bin/ext/figleaf/__init__.py               |  325 --
 tools/bin/ext/figleaf/_lib.py                   |   22 -
 tools/bin/ext/figleaf/annotate.py               |  241 --
 tools/bin/ext/figleaf/annotate_cover.py         |  159 -
 tools/bin/ext/figleaf/annotate_html.py          |  292 --
 tools/bin/ext/figleaf/annotate_sections.py      |   95 -
 tools/bin/ext/figleaf/figleaf2html              |    7 -
 tools/bin/ext/figleaf/internals.py              |  257 --
 tools/bin/ext/figleaf/nose_sections.py          |  133 -
 tools/bin/ext/pg8000/__init__.py                |   37 -
 tools/bin/ext/pg8000/dbapi.py                   |  621 ---
 tools/bin/ext/pg8000/errors.py                  |  115 -
 tools/bin/ext/pg8000/interface.py               |  542 ---
 tools/bin/ext/pg8000/protocol.py                | 1340 -------
 tools/bin/ext/pg8000/types.py                   |  687 ----
 tools/bin/ext/pg8000/util.py                    |   20 -
 tools/bin/ext/pygresql/__init__.py              |    0
 tools/bin/ext/simplejson/__init__.py            |  303 --
 tools/bin/ext/simplejson/_speedups.c            |  234 --
 tools/bin/ext/simplejson/decoder.py             |  289 --
 tools/bin/ext/simplejson/encoder.py             |  387 --
 tools/bin/ext/simplejson/jsonfilter.py          |   56 -
 tools/bin/ext/simplejson/scanner.py             |   79 -
 tools/bin/ext/simplejson/tests/__init__.py      |    0
 tools/bin/ext/simplejson/tests/test_attacks.py  |   22 -
 tools/bin/ext/simplejson/tests/test_dump.py     |   26 -
 tools/bin/ext/simplejson/tests/test_fail.py     |   86 -
 tools/bin/ext/simplejson/tests/test_float.py    |   20 -
 tools/bin/ext/simplejson/tests/test_indent.py   |   57 -
 tools/bin/ext/simplejson/tests/test_pass1.py    |   88 -
 tools/bin/ext/simplejson/tests/test_pass2.py    |   27 -
 tools/bin/ext/simplejson/tests/test_pass3.py    |   32 -
 .../bin/ext/simplejson/tests/test_recursion.py  |   78 -
 .../bin/ext/simplejson/tests/test_separators.py |   57 -
 tools/bin/ext/simplejson/tests/test_unicode.py  |   32 -
 tools/bin/ext/yaml/__init__.py                  |  306 --
 tools/bin/ext/yaml/composer.py                  |  134 -
 tools/bin/ext/yaml/constructor.py               |  691 ----
 tools/bin/ext/yaml/cyaml.py                     |  101 -
 tools/bin/ext/yaml/dumper.py                    |   78 -
 tools/bin/ext/yaml/emitter.py                   | 1179 ------
 tools/bin/ext/yaml/error.py                     |   91 -
 tools/bin/ext/yaml/events.py                    |  102 -
 tools/bin/ext/yaml/loader.py                    |   56 -
 tools/bin/ext/yaml/nodes.py                     |   65 -
 tools/bin/ext/yaml/parser.py                    |  602 ---
 tools/bin/ext/yaml/reader.py                    |  241 --
 tools/bin/ext/yaml/representer.py               |  504 ---
 tools/bin/ext/yaml/resolver.py                  |  239 --
 tools/bin/ext/yaml/scanner.py                   | 1472 -------
 tools/bin/ext/yaml/serializer.py                |  127 -
 tools/bin/ext/yaml/tokens.py                    |  120 -
 tools/bin/generate-greenplum-path.sh            |    2 +-
 tools/bin/gpcheck                               |    4 +-
 tools/bin/gpextract                             |    4 +-
 tools/bin/gpload.py                             |    2 +-
 tools/bin/gppylib/commands/base.py              |   10 +-
 tools/bin/gppylib/db/dbconn.py                  |    2 +-
 tools/bin/gppylib/operations/gpMigratorUtil.py  |    2 +-
 .../bin/gppylib/operations/test_utils_helper.py |    2 +-
 .../test/regress/test_regress_pygresql.py       |    4 +-
 tools/bin/hawq_ctl                              |    2 +-
 tools/bin/hawqconfig                            |    2 +-
 tools/bin/hawqextract                           |    4 +-
 tools/bin/hawqfilespace                         |    2 +-
 tools/bin/hawqstate                             |    2 +-
 tools/bin/lib/gpcheckcat                        |    4 +-
 tools/bin/pythonSrc/.gitignore                  |    8 -
 tools/bin/pythonSrc/PSI-0.3b2_gp.tar.gz         |  Bin 93810 -> 0 bytes
 .../pythonSrc/PyGreSQL-4.0/docs/announce.html   |   28 -
 .../pythonSrc/PyGreSQL-4.0/docs/announce.txt    |   23 -
 .../pythonSrc/PyGreSQL-4.0/docs/changelog.html  |  333 --
 .../pythonSrc/PyGreSQL-4.0/docs/changelog.txt   |  285 --
 .../bin/pythonSrc/PyGreSQL-4.0/docs/default.css |  279 --
 tools/bin/pythonSrc/PyGreSQL-4.0/docs/docs.css  |  109 -
 .../bin/pythonSrc/PyGreSQL-4.0/docs/future.html |   62 -
 .../bin/pythonSrc/PyGreSQL-4.0/docs/future.txt  |   48 -
 .../bin/pythonSrc/PyGreSQL-4.0/docs/index.html  |  182 -
 .../pythonSrc/PyGreSQL-4.0/docs/install.html    |  198 -
 .../bin/pythonSrc/PyGreSQL-4.0/docs/install.txt |  188 -
 tools/bin/pythonSrc/PyGreSQL-4.0/docs/pg.html   | 2429 -----------
 tools/bin/pythonSrc/PyGreSQL-4.0/docs/pg.txt    | 1382 -------
 tools/bin/pythonSrc/PyGreSQL-4.0/docs/pgdb.html |   51 -
 tools/bin/pythonSrc/PyGreSQL-4.0/docs/pgdb.txt  |   42 -
 .../bin/pythonSrc/PyGreSQL-4.0/docs/readme.html |  243 --
 .../bin/pythonSrc/PyGreSQL-4.0/docs/readme.txt  |  206 -
 tools/bin/pythonSrc/PyGreSQL-4.0/pg.py          |  711 ----
 tools/bin/pythonSrc/PyGreSQL-4.0/pgdb.py        |  582 ---
 tools/bin/pythonSrc/PyGreSQL-4.0/pgmodule.c     | 3756 ------------------
 tools/bin/pythonSrc/PyGreSQL-4.0/setup.py       |  152 -
 .../pythonSrc/PyGreSQL-4.0/tutorial/advanced.py |  198 -
 .../pythonSrc/PyGreSQL-4.0/tutorial/basics.py   |  296 --
 .../bin/pythonSrc/PyGreSQL-4.0/tutorial/func.py |  205 -
 .../pythonSrc/PyGreSQL-4.0/tutorial/syscat.py   |  149 -
 tools/bin/pythonSrc/epydoc-3.0.1.tar.gz         |  Bin 1845788 -> 0 bytes
 tools/bin/pythonSrc/lockfile-0.9.1.tar.gz       |  Bin 16949 -> 0 bytes
 tools/bin/pythonSrc/logilab-astng-0.20.1.tar.gz |  Bin 103800 -> 0 bytes
 .../bin/pythonSrc/logilab-common-0.50.1.tar.gz  |  Bin 164975 -> 0 bytes
 tools/bin/pythonSrc/paramiko-1.7.6-9.tar.gz     |  Bin 212556 -> 0 bytes
 tools/bin/pythonSrc/pychecker-0.8.18.tar.gz     |  Bin 145805 -> 0 bytes
 tools/bin/pythonSrc/pycrypto-2.0.1.tar.gz       |  Bin 157389 -> 0 bytes
 tools/bin/pythonSrc/pylint-0.21.0.tar.gz        |  Bin 199212 -> 0 bytes
 tools/bin/pythonSrc/unittest2-0.5.1.tar.gz      |  Bin 62470 -> 0 bytes
 106 files changed, 29 insertions(+), 25497 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/Makefile
----------------------------------------------------------------------
diff --git a/tools/bin/Makefile b/tools/bin/Makefile
index 88b9634..7d6423c 100644
--- a/tools/bin/Makefile
+++ b/tools/bin/Makefile
@@ -36,7 +36,7 @@ PYLIB_SRC=$(SRC)/pythonSrc
 LIB_DIR=$(SRC)/lib
 PYLIB_DIR=$(SRC)/ext
 
-all: lockfile paramiko pycrypto pygresql stream pychecker psi unittest2
+all: stream 
 
 #
 # Python Libraries
@@ -44,13 +44,6 @@ all: lockfile paramiko pycrypto pygresql stream pychecker psi unittest2
 
 
 NETPERF_DIR=netperf-2.4.3
-netperf:
-	@echo "--- netperf"
-	if [ ! -f $(SRC)/src/$(NETPERF_DIR)/Makefile ]; then \
-		(cd $(SRC)/src && gzip -d -c $(NETPERF_DIR).tar.gz | tar xvf - && cd $(NETPERF_DIR) && CC="$(CC)" CFLAGS="${CFLAGS}" LDFLAGS="${LDFLAGS}" ./configure) \
-	fi
-	cd $(SRC)/src/$(NETPERF_DIR) && $(MAKE)
-	cp $(SRC)/src/$(NETPERF_DIR)/src/netperf $(SRC)/src/$(NETPERF_DIR)/src/netserver lib/
 
 #
 #  STREAM
@@ -61,120 +54,6 @@ stream:
 	cd $(STREAM_DIR) && NO_M64=TRUE $(CC) $(CFLAGS) stream.c -o stream
 	cp $(STREAM_DIR)/stream lib/stream 
 
-#
-# PyGreSQL
-#
-PYGRESQL_VERSION=4.0
-PYGRESQL_DIR=PyGreSQL-$(PYGRESQL_VERSION)
-
-pygresql:
-	@echo "--- PyGreSQL"
-	cd $(PYLIB_SRC)/$(PYGRESQL_DIR) && CC="$(CC)" CFLAGS="${CFLAGS}" LDFLAGS="-L$(top_builddir)/src/interfaces/libpq ${LDFLAGS}" python setup.py build
-	mkdir -p $(PYLIB_DIR)/pygresql
-	cp -r $(PYLIB_SRC)/$(PYGRESQL_DIR)/build/lib.*/* $(PYLIB_DIR)/pygresql
-	touch $(PYLIB_DIR)/__init__.py
-
-#
-# PARAMIKO
-#
-PARAMIKO_VERSION=1.7.6-9
-PARAMIKO_DIR=paramiko-$(PARAMIKO_VERSION)
-paramiko:
-	@echo "--- paramiko"
-	cd $(PYLIB_SRC)/ && $(TAR) xzf $(PARAMIKO_DIR).tar.gz
-	cd $(PYLIB_SRC)/$(PARAMIKO_DIR)/ && python setup.py build
-	cp -r $(PYLIB_SRC)/$(PARAMIKO_DIR)/build/lib/paramiko  $(PYLIB_DIR)/
-
-#
-# LOCKFILE
-#
-# note the awk commands are used to eliminate references to code in __init__.py
-# that we don't use and also to insert a comment to tell pylint not to complain
-# about these files since we are not in a position to correct those warnings.
-#
-LOCKFILE_VERSION=0.9.1
-LOCKFILE_DIR=lockfile-$(LOCKFILE_VERSION)
-LOCKFILE_SRC=$(PYLIB_SRC)/$(LOCKFILE_DIR)/build/lib/lockfile
-LOCKFILE_DST=$(PYLIB_DIR)/lockfile
-lockfile:
-	@echo "--- lockfile"
-	cd $(PYLIB_SRC)/ && $(TAR) xzf $(LOCKFILE_DIR).tar.gz
-	cd $(PYLIB_SRC)/$(LOCKFILE_DIR)/ && python setup.py build
-	mkdir -p $(PYLIB_DIR)/lockfile
-	awk 'BEGIN{print "# pylint: disable-all"} /^if hasattr/ {exit} { print }' < $(LOCKFILE_SRC)/__init__.py    > $(LOCKFILE_DST)/__init__.py
-	awk 'BEGIN{print "# pylint: disable-all"} { print }'                      < $(LOCKFILE_SRC)/pidlockfile.py > $(LOCKFILE_DST)/pidlockfile.py
-
-#
-# PYCRYPTO
-#
-PYCRYPTO_VERSION=2.0.1
-PYCRYPTO_DIR=pycrypto-$(PYCRYPTO_VERSION)
-
-pycrypto:
-	@echo "--- pycrypto"
-	cd $(PYLIB_SRC)/ && $(TAR) xzf $(PYCRYPTO_DIR).tar.gz
-	cd $(PYLIB_SRC)/$(PYCRYPTO_DIR)/ && CC="$(CC)" CFLAGS="${CFLAGS}" LDFLAGS="${LDFLAGS}" python setup.py build
-	cp -r $(PYLIB_SRC)/$(PYCRYPTO_DIR)/build/lib.*/Crypto $(PYLIB_DIR)
-
-#
-# PSI
-#
-PSI_VERSION=0.3b2_gp
-PSI_DIR=PSI-$(PSI_VERSION)
-
-psi:
-	@echo "--- psi"
-	cd $(PYLIB_SRC)/ && $(TAR) xzf $(PSI_DIR).tar.gz
-	cd $(PYLIB_SRC)/$(PSI_DIR)/ && CC="$(CC)" CFLAGS="${CFLAGS}" LDFLAGS="${LDFLAGS}" python setup.py build
-	cp -r $(PYLIB_SRC)/$(PSI_DIR)/build/lib.*/psi $(PYLIB_DIR)
-
-
-#
-# PYCHECKER
-#
-PYCHECKER_VERSION=0.8.18
-PYCHECKER_DIR=pychecker-$(PYCHECKER_VERSION)
-
-pychecker:
-	@echo "--- pychecker"
-	cd $(PYLIB_SRC)/ && $(TAR) xzf $(PYCHECKER_DIR).tar.gz
-	cd $(PYLIB_SRC)/$(PYCHECKER_DIR)/ && python setup.py build
-	cp -r $(PYLIB_SRC)/$(PYCHECKER_DIR)/build/lib/pychecker  $(PYLIB_DIR)/
-
-
-#
-# PYLINT
-#
-
-PYLINT_VERSION=0.21.0
-PYLINT_DIR=pylint-$(PYLINT_VERSION)
-LOGILAB_ASTNG_VERSION=0.20.1
-LOGILAB_ASTNG_DIR=logilab-astng-$(LOGILAB_ASTNG_VERSION)
-LOGILAB_COMMON_VERSION=0.50.1
-LOGILAB_COMMON_DIR=logilab-common-$(LOGILAB_COMMON_VERSION)
-PYLINT_PYTHONPATH=$(PYLIB_DIR):$(PYLIB_SRC)/$(PYLINT_DIR)/build/lib/
-
-pylint:
-	@echo "--- pylint"
-	@cd $(PYLIB_SRC)/ && $(TAR) xzf $(PYLINT_DIR).tar.gz
-	@cd $(PYLIB_SRC)/ && $(TAR) xzf $(LOGILAB_ASTNG_DIR).tar.gz
-	@cd $(PYLIB_SRC)/ && $(TAR) xzf $(LOGILAB_COMMON_DIR).tar.gz
-	@cd $(PYLIB_SRC)/$(PYLINT_DIR)/ && python setup.py build 1> /dev/null
-	@cd $(PYLIB_SRC)/$(LOGILAB_ASTNG_DIR)/ && python setup.py build 1> /dev/null
-	@cd $(PYLIB_SRC)/$(LOGILAB_COMMON_DIR)/ && python setup.py build 1> /dev/null
-	@cp -r $(PYLIB_SRC)/$(LOGILAB_COMMON_DIR)/build/lib/logilab $(PYLIB_SRC)/$(PYLINT_DIR)/build/lib/
-	@cp -r $(PYLIB_SRC)/$(LOGILAB_ASTNG_DIR)/build/lib/logilab $(PYLIB_SRC)/$(PYLINT_DIR)/build/lib/
-	@touch $(PYLIB_SRC)/$(PYLINT_DIR)/build/lib/__init__.py
-	@touch $(PYLIB_SRC)/$(PYLINT_DIR)/build/lib/logilab/__init__.py
-
-UNITTEST2_VERSION=0.5.1
-UNITTEST2_DIR=unittest2-${UNITTEST2_VERSION}
-unittest2:
-	@echo "--- unittest2"
-	cd $(PYLIB_SRC)/ && $(TAR) xzf $(UNITTEST2_DIR).tar.gz
-	cd $(PYLIB_SRC)/$(UNITTEST2_DIR)/ && python setup.py build
-	cp -r $(PYLIB_SRC)/$(UNITTEST2_DIR)/build/lib/unittest2  $(PYLIB_DIR)/
-
 
 PYTHON_FILES=`grep -l --exclude=Makefile --exclude=gplogfilter --exclude=gpcheckos --exclude=gpgenfsmap.py --exclude=throttlingD.py "/bin/env python" *`\
 			 `grep -l "/bin/env python" $(SRC)/../sbin/*`\
@@ -246,9 +125,6 @@ install: all
 	for files in `find * -maxdepth 0 -type f | grep -x -v -E "${SKIP_INSTALL}"`; do ${INSTALL_SCRIPT} $${files} ${bindir}; done
 	${MAKE} -C gppylib $@
 	${MAKE} -C hawqpylib $@
-	${MAKE} -C ext $@
-	# for dirs in `find hawqpylib -type d` ; do ${INSTALL_SCRIPT} -d ${bindir}/hawqpylib/$${dirs}; done
-	# for files in `find hawqpylib -type f` ; do ${INSTALL_SCRIPT} $${files} ${bindir}/hawqpylib/; done
 	${INSTALL_SCRIPT} -d ${bindir}/lib
 	for files in `find lib -type f`; do ${INSTALL_SCRIPT} $${files} ${bindir}/lib; done
 	unset LIBPATH; ./generate-greenplum-path.sh $(prefix) > ${prefix}/greenplum_path.sh

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/Makefile
----------------------------------------------------------------------
diff --git a/tools/bin/ext/Makefile b/tools/bin/ext/Makefile
deleted file mode 100644
index ba49f09..0000000
--- a/tools/bin/ext/Makefile
+++ /dev/null
@@ -1,27 +0,0 @@
-#-------------------------------------------------------------------------
-#
-# Makefile for the management utilities
-#
-#-------------------------------------------------------------------------
-
-subdir = tools/bin/ext
-top_builddir = ../../..
-include $(top_builddir)/src/Makefile.global
-
-SKIP_INSTALL=.gitignore|.p4ignore|.rcfile|Makefile|test/
-
-install:
-	${INSTALL_SCRIPT} -d ${libdir}/python/
-	@for file in `find * -type f | grep -v -E "${SKIP_INSTALL}"`; \
-		do \
-			echo "install $${file} into ${libdir}/python/$${file}" ; \
-			${INSTALL_SCRIPT} $${file} ${libdir}/python/$${file}; \
-		done
-	@for dirs in `find * -type d | grep -v test` ;\
-		do \
-			${INSTALL_SCRIPT} -d ${libdir}/python/$${dirs}; \
-			for file in `find $${dirs} -type f | grep -v -E "${SKIP_INSTALL}"`; do \
-				echo "install $${file} into ${libdir}/python/$${file}" ; \
-				${INSTALL_SCRIPT} $${file} ${libdir}/python/$${file}; \
-			done \
-		done

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/__init__.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/__init__.py b/tools/bin/ext/__init__.py
deleted file mode 100644
index b6c84f9..0000000
--- a/tools/bin/ext/__init__.py
+++ /dev/null
@@ -1,306 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from error import *
-
-from tokens import *
-from events import *
-from nodes import *
-
-from loader import *
-from dumper import *
-
-try:
-    from cyaml import *
-except ImportError:
-    pass
-
-def scan(stream, Loader=Loader):
-    """
-    Scan a YAML stream and produce scanning tokens.
-    """
-    loader = Loader(stream)
-    while loader.check_token():
-        yield loader.get_token()
-
-def parse(stream, Loader=Loader):
-    """
-    Parse a YAML stream and produce parsing events.
-    """
-    loader = Loader(stream)
-    while loader.check_event():
-        yield loader.get_event()
-
-def compose(stream, Loader=Loader):
-    """
-    Parse the first YAML document in a stream
-    and produce the corresponding representation tree.
-    """
-    loader = Loader(stream)
-    if loader.check_node():
-        return loader.get_node()
-
-def compose_all(stream, Loader=Loader):
-    """
-    Parse all YAML documents in a stream
-    and produce corresponding representation trees.
-    """
-    loader = Loader(stream)
-    while loader.check_node():
-        yield loader.get_node()
-
-def load_all(stream, Loader=Loader):
-    """
-    Parse all YAML documents in a stream
-    and produce corresponding Python objects.
-    """
-    loader = Loader(stream)
-    while loader.check_data():
-        yield loader.get_data()
-
-def load(stream, Loader=Loader):
-    """
-    Parse the first YAML document in a stream
-    and produce the corresponding Python object.
-    """
-    loader = Loader(stream)
-    if loader.check_data():
-        return loader.get_data()
-
-def safe_load_all(stream):
-    """
-    Parse all YAML documents in a stream
-    and produce corresponding Python objects.
-    Resolve only basic YAML tags.
-    """
-    return load_all(stream, SafeLoader)
-
-def safe_load(stream):
-    """
-    Parse the first YAML document in a stream
-    and produce the corresponding Python object.
-    Resolve only basic YAML tags.
-    """
-    return load(stream, SafeLoader)
-
-def emit(events, stream=None, Dumper=Dumper,
-        canonical=None, indent=None, width=None,
-        allow_unicode=None, line_break=None):
-    """
-    Emit YAML parsing events into a stream.
-    If stream is None, return the produced string instead.
-    """
-    getvalue = None
-    if stream is None:
-        try:
-            from cStringIO import StringIO
-        except ImportError:
-            from StringIO import StringIO
-        stream = StringIO()
-        getvalue = stream.getvalue
-    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
-            allow_unicode=allow_unicode, line_break=line_break)
-    for event in events:
-        dumper.emit(event)
-    if getvalue:
-        return getvalue()
-
-def serialize_all(nodes, stream=None, Dumper=Dumper,
-        canonical=None, indent=None, width=None,
-        allow_unicode=None, line_break=None,
-        encoding='utf-8', explicit_start=None, explicit_end=None,
-        version=None, tags=None):
-    """
-    Serialize a sequence of representation trees into a YAML stream.
-    If stream is None, return the produced string instead.
-    """
-    getvalue = None
-    if stream is None:
-        try:
-            from cStringIO import StringIO
-        except ImportError:
-            from StringIO import StringIO
-        stream = StringIO()
-        getvalue = stream.getvalue
-    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
-            allow_unicode=allow_unicode, line_break=line_break,
-            encoding=encoding, version=version, tags=tags,
-            explicit_start=explicit_start, explicit_end=explicit_end)
-    dumper.open()
-    for node in nodes:
-        dumper.serialize(node)
-    dumper.close()
-    if getvalue:
-        return getvalue()
-
-def serialize(node, stream=None, Dumper=Dumper, **kwds):
-    """
-    Serialize a representation tree into a YAML stream.
-    If stream is None, return the produced string instead.
-    """
-    return serialize_all([node], stream, Dumper=Dumper, **kwds)
-
-def dump_all(documents, stream=None, Dumper=Dumper,
-        default_style=None, default_flow_style=None,
-        canonical=None, indent=None, width=None,
-        allow_unicode=None, line_break=None,
-        encoding='utf-8', explicit_start=None, explicit_end=None,
-        version=None, tags=None):
-    """
-    Serialize a sequence of Python objects into a YAML stream.
-    If stream is None, return the produced string instead.
-    """
-    getvalue = None
-    if stream is None:
-        try:
-            from cStringIO import StringIO
-        except ImportError:
-            from StringIO import StringIO
-        stream = StringIO()
-        getvalue = stream.getvalue
-    dumper = Dumper(stream, default_style=default_style,
-            default_flow_style=default_flow_style,
-            canonical=canonical, indent=indent, width=width,
-            allow_unicode=allow_unicode, line_break=line_break,
-            encoding=encoding, version=version, tags=tags,
-            explicit_start=explicit_start, explicit_end=explicit_end)
-    dumper.open()
-    for data in documents:
-        dumper.represent(data)
-    dumper.close()
-    if getvalue:
-        return getvalue()
-
-def dump(data, stream=None, Dumper=Dumper, **kwds):
-    """
-    Serialize a Python object into a YAML stream.
-    If stream is None, return the produced string instead.
-    """
-    return dump_all([data], stream, Dumper=Dumper, **kwds)
-
-def safe_dump_all(documents, stream=None, **kwds):
-    """
-    Serialize a sequence of Python objects into a YAML stream.
-    Produce only basic YAML tags.
-    If stream is None, return the produced string instead.
-    """
-    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
-
-def safe_dump(data, stream=None, **kwds):
-    """
-    Serialize a Python object into a YAML stream.
-    Produce only basic YAML tags.
-    If stream is None, return the produced string instead.
-    """
-    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
-
-def add_implicit_resolver(tag, regexp, first=None,
-        Loader=Loader, Dumper=Dumper):
-    """
-    Add an implicit scalar detector.
-    If an implicit scalar value matches the given regexp,
-    the corresponding tag is assigned to the scalar.
-    first is a sequence of possible initial characters or None.
-    """
-    Loader.add_implicit_resolver(tag, regexp, first)
-    Dumper.add_implicit_resolver(tag, regexp, first)
-
-def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
-    """
-    Add a path based resolver for the given tag.
-    A path is a list of keys that forms a path
-    to a node in the representation tree.
-    Keys can be string values, integers, or None.
-    """
-    Loader.add_path_resolver(tag, path, kind)
-    Dumper.add_path_resolver(tag, path, kind)
-
-def add_constructor(tag, constructor, Loader=Loader):
-    """
-    Add a constructor for the given tag.
-    Constructor is a function that accepts a Loader instance
-    and a node object and produces the corresponding Python object.
-    """
-    Loader.add_constructor(tag, constructor)
-
-def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
-    """
-    Add a multi-constructor for the given tag prefix.
-    Multi-constructor is called for a node if its tag starts with tag_prefix.
-    Multi-constructor accepts a Loader instance, a tag suffix,
-    and a node object and produces the corresponding Python object.
-    """
-    Loader.add_multi_constructor(tag_prefix, multi_constructor)
-
-def add_representer(data_type, representer, Dumper=Dumper):
-    """
-    Add a representer for the given type.
-    Representer is a function accepting a Dumper instance
-    and an instance of the given data type
-    and producing the corresponding representation node.
-    """
-    Dumper.add_representer(data_type, representer)
-
-def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
-    """
-    Add a representer for the given type.
-    Multi-representer is a function accepting a Dumper instance
-    and an instance of the given data type or subtype
-    and producing the corresponding representation node.
-    """
-    Dumper.add_multi_representer(data_type, multi_representer)
-
-class YAMLObjectMetaclass(type):
-    """
-    The metaclass for YAMLObject.
-    """
-    def __init__(cls, name, bases, kwds):
-        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
-        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
-            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
-            cls.yaml_dumper.add_representer(cls, cls.to_yaml)
-
-class YAMLObject(object):
-    """
-    An object that can dump itself to a YAML stream
-    and load itself from a YAML stream.
-    """
-
-    __metaclass__ = YAMLObjectMetaclass
-    __slots__ = ()  # no direct instantiation, so allow immutable subclasses
-
-    yaml_loader = Loader
-    yaml_dumper = Dumper
-
-    yaml_tag = None
-    yaml_flow_style = None
-
-    def from_yaml(cls, loader, node):
-        """
-        Convert a representation node to a Python object.
-        """
-        return loader.construct_yaml_object(node, cls)
-    from_yaml = classmethod(from_yaml)
-
-    def to_yaml(cls, dumper, data):
-        """
-        Convert a Python object to a representation node.
-        """
-        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
-                flow_style=cls.yaml_flow_style)
-    to_yaml = classmethod(to_yaml)
-
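
The ext/__init__.py removed above carries the PyYAML 3.x entry points (load, safe_load, dump, safe_dump and friends), mirroring the bundled ext/yaml package. A minimal usage sketch of that API under Python 2, assuming the bundled copy (or any PyYAML) is importable as 'yaml'; the sample document is illustrative only:

    import yaml   # bundled under tools/bin/ext/yaml; upstream PyYAML exposes the same names

    doc = yaml.safe_load("segments:\n  - seg0\n  - seg1\n")   # parse the first document, basic tags only
    print doc['segments']                                     # ['seg0', 'seg1']

    text = yaml.safe_dump(doc, default_flow_style=False)      # emit block-style YAML, basic tags only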

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/__init__.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/__init__.py b/tools/bin/ext/figleaf/__init__.py
deleted file mode 100644
index 5761930..0000000
--- a/tools/bin/ext/figleaf/__init__.py
+++ /dev/null
@@ -1,325 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-figleaf is another tool to trace Python code coverage.
-
-figleaf uses the sys.settrace hook to record which statements are
-executed by the CPython interpreter; this record can then be saved
-into a file, or otherwise communicated back to a reporting script.
-
-figleaf differs from the gold standard of Python coverage tools
-('coverage.py') in several ways.  First and foremost, figleaf uses the
-same criterion for "interesting" lines of code as the sys.settrace
-function, which obviates some of the complexity in coverage.py (but
-does mean that your "loc" count goes down).  Second, figleaf does not
-record code executed in the Python standard library, which results in
-a significant speedup.  And third, the format in which the coverage
-information is saved is very simple and easy to work with.
-
-You might want to use figleaf if you're recording coverage from
-multiple types of tests and need to aggregate the coverage in
-interesting ways, and/or control when coverage is recorded.
-coverage.py is a better choice for command-line execution, and its
-reporting is a fair bit nicer.
-
-Command line usage: ::
-
-  figleaf <python file to execute> <args to python file>
-
-The figleaf output is saved into the file '.figleaf', which is an
-*aggregate* of coverage reports from all figleaf runs from this
-directory.  '.figleaf' contains a pickled dictionary of sets; the keys
-are source code filenames, and the sets contain all line numbers
-executed by the Python interpreter. See the docs or command-line
-programs in bin/ for more information.
-
-High level API: ::
-
- * ``start(ignore_lib=True)`` -- start recording code coverage.
- * ``stop()``                 -- stop recording code coverage.
- * ``get_trace_obj()``        -- return the (singleton) trace object.
- * ``get_info()``             -- get the coverage dictionary
-
-Classes & functions worth knowing about (lower level API):
-
- * ``get_lines(fp)`` -- return the set of interesting lines in the fp.
- * ``combine_coverage(d1, d2)`` -- combine coverage info from two dicts.
- * ``read_coverage(filename)`` -- load the coverage dictionary
- * ``write_coverage(filename)`` -- write the coverage out.
- * ``annotate_coverage(...)`` -- annotate a Python file with its coverage info.
-
-Known problems:
-
- -- module docstrings are *covered* but not found.
-
-AUTHOR: C. Titus Brown, titus@idyll.org, with contributions from Iain Lowe.
-
-'figleaf' is Copyright (C) 2006, 2007 C. Titus Brown.  It is under the
-BSD license.
-"""
-__version__ = "0.6.1"
-
-# __all__ == @CTB
-
-import sys
-import os
-from cPickle import dump, load
-from optparse import OptionParser
-
-import internals
-
-# use builtin sets if in >= 2.4, otherwise use 'sets' module.
-try:
-    set()
-except NameError:
-    from sets import Set as set
-
-def get_lines(fp):
-    """
-    Return the set of interesting lines in the source code read from
-    this file handle.
-    """
-    # rstrip is a workaround for http://bugs.python.org/issue4262
-    src = fp.read().rstrip() + "\n"
-    code = compile(src, "", "exec")
-    
-    return internals.get_interesting_lines(code)
-
-def combine_coverage(d1, d2):
-    """
-    Given two coverage dictionaries, combine the recorded coverage
-    and return a new dictionary.
-    """
-    keys = set(d1.keys())
-    keys.update(set(d2.keys()))
-
-    new_d = {}
-    for k in keys:
-        v = d1.get(k, set())
-        v2 = d2.get(k, set())
-
-        s = set(v)
-        s.update(v2)
-        new_d[k] = s
-
-    return new_d
-
-def write_coverage(filename, append=True):
-    """
-    Write the current coverage info out to the given filename.  If
-    'append' is false, destroy any previously recorded coverage info.
-    """
-    if _t is None:
-        return
-
-    data = internals.CoverageData(_t)
-
-    d = data.gather_files()
-
-    # sum existing coverage?
-    if append:
-        old = {}
-        fp = None
-        try:
-            fp = open(filename)
-        except IOError:
-            pass
-
-        if fp:
-            old = load(fp)
-            fp.close()
-            d = combine_coverage(d, old)
-
-    # ok, save.
-    outfp = open(filename, 'w')
-    try:
-        dump(d, outfp)
-    finally:
-        outfp.close()
-
-def read_coverage(filename):
-    """
-    Read a coverage dictionary in from the given file.
-    """
-    fp = open(filename)
-    try:
-        d = load(fp)
-    finally:
-        fp.close()
-
-    return d
-
-def dump_pickled_coverage(out_fp):
-    """
-    Dump coverage information in pickled format into the given file handle.
-    """
-    dump(_t, out_fp)
-
-def load_pickled_coverage(in_fp):
-    """
-    Replace (overwrite) coverage information from the given file handle.
-    """
-    global _t
-    _t = load(in_fp)
-
-def annotate_coverage(in_fp, out_fp, covered, all_lines,
-                      mark_possible_lines=False):
-    """
-    A simple example coverage annotator that outputs text.
-    """
-    for i, line in enumerate(in_fp):
-        i = i + 1
-
-        if i in covered:
-            symbol = '>'
-        elif i in all_lines:
-            symbol = '!'
-        else:
-            symbol = ' '
-
-        symbol2 = ''
-        if mark_possible_lines:
-            symbol2 = ' '
-            if i in all_lines:
-                symbol2 = '-'
-
-        out_fp.write('%s%s %s' % (symbol, symbol2, line,))
-
-def get_data():
-    if _t:
-        return internals.CoverageData(_t)
-
-#######################
-
-#
-# singleton functions/top-level API
-#
-
-_t = None
-
-def init(exclude_path=None, include_only=None):
-    from internals import CodeTracer
-    
-    global _t
-    if _t is None:
-        _t = CodeTracer(exclude_path, include_only)
-
-def start(ignore_python_lib=True):
-    """
-    Start tracing code coverage.  If 'ignore_python_lib' is True on
-    initial call, ignore all files that live below the same directory as
-    the 'os' module.
-    """
-    global _t
-    if not _t:
-        exclude_path = None
-        if ignore_python_lib:
-            exclude_path = os.path.realpath(os.path.dirname(os.__file__))
-
-        init(exclude_path, None)
-    
-    _t.start()
-
-def start_section(name):
-    global _t
-    _t.start_section(name)
-    
-def stop_section():
-    global _t
-    _t.stop_section()
-
-def stop():
-    """
-    Stop tracing code coverage.
-    """
-    global _t
-    if _t is not None:
-        _t.stop()
-
-def get_trace_obj():
-    """
-    Return the (singleton) trace object, if it exists.
-    """
-    return _t
-
-def get_info(section_name=None):
-    """
-    Get the coverage dictionary from the trace object.
-    """
-    if _t:
-        return get_data().gather_files(section_name)
-
-#############
-
-def display_ast():
-    l = internals.LineGrabber(open(sys.argv[1]))
-    l.pretty_print()
-    print l.lines
-
-def main():
-    """
-    Execute the given Python file with coverage, making it look like it is
-    __main__.
-    """
-    ignore_pylibs = False
-
-    # gather args
-
-    n = 1
-    figleaf_args = []
-    for n in range(1, len(sys.argv)):
-        arg = sys.argv[n]
-        if arg.startswith('-'):
-            figleaf_args.append(arg)
-        else:
-            break
-
-    remaining_args = sys.argv[n:]
-
-    usage = "usage: %prog [options] [python_script arg1 arg2 ...]"
-    option_parser = OptionParser(usage=usage)
-
-    option_parser.add_option('-i', '--ignore-pylibs', action="store_true",
-                             dest="ignore_pylibs", default=False,
-                             help="ignore Python library modules")
-
-    (options, args) = option_parser.parse_args(args=figleaf_args)
-    assert len(args) == 0
-
-    if not remaining_args:
-        option_parser.error("you must specify a python script to run!")
-
-    ignore_pylibs = options.ignore_pylibs
-
-    ## Reset system args so that the subsequently exec'd file can read
-    ## from sys.argv
-    
-    sys.argv = remaining_args
-
-    sys.path[0] = os.path.dirname(sys.argv[0])
-
-    cwd = os.getcwd()
-
-    start(ignore_pylibs)        # START code coverage
-
-    import __main__
-    try:
-        execfile(sys.argv[0], __main__.__dict__)
-    finally:
-        stop()                          # STOP code coverage
-
-        write_coverage(os.path.join(cwd, '.figleaf'))
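
The module docstring above describes figleaf's high-level API (start, stop, write_coverage, read_coverage). A minimal sketch of that flow under Python 2, assuming the removed package is still importable and with 'my_script.py' as a placeholder for the code under measurement:

    import figleaf

    figleaf.start()                          # begin tracing; files under the Python stdlib are ignored by default
    try:
        execfile('my_script.py')             # run the code being measured (placeholder path)
    finally:
        figleaf.stop()                       # stop tracing
        figleaf.write_coverage('.figleaf')   # merge this run into the aggregate .figleaf file

    coverage = figleaf.read_coverage('.figleaf')   # dict: source filename -> set of executed line numbers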

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/_lib.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/_lib.py b/tools/bin/ext/figleaf/_lib.py
deleted file mode 100644
index 3ee49f8..0000000
--- a/tools/bin/ext/figleaf/_lib.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import os.path, sys
-libdir = os.path.join(os.path.dirname(__file__), '../')
-libdir = os.path.normpath(libdir)
-
-if libdir not in sys.path:
-    sys.path.insert(0, libdir)

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/annotate.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/annotate.py b/tools/bin/ext/figleaf/annotate.py
deleted file mode 100644
index 43099f6..0000000
--- a/tools/bin/ext/figleaf/annotate.py
+++ /dev/null
@@ -1,241 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-Common functions for annotating files with figleaf coverage information.
-"""
-import sys, os
-from optparse import OptionParser
-import ConfigParser
-import re
-import logging
-
-import figleaf
-
-thisdir = os.path.dirname(__file__)
-
-try:                                    # 2.3 compatibility
-    logging.basicConfig(format='%(message)s', level=logging.WARNING)
-except TypeError:
-    pass
-
-logger = logging.getLogger('figleaf.annotate')
-
-DEFAULT_CONFIGURE_FILE = ".figleafrc"
-
-### utilities
-
-def safe_conf_get(conf, section, name, default):
-    try:
-        val = conf.get(section, name)
-    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
-        val = default
-
-    return val
-
-def configure(parser):
-    """
-    Configure the optparse.OptionParser object with defaults, optionally
-    loaded from a configuration file.
-    """
-    CONFIG_FILE = os.environ.get('FIGLEAFRC', DEFAULT_CONFIGURE_FILE)
-    
-    parser.add_option("-c", "--coverage-file", action="store",
-                       type="string", dest="coverage_file",
-                       help="File containing figleaf coverage information.")
-    
-    parser.add_option("-s", "--sections-file", action="store",
-                       type="string", dest="sections_file",
-                       help="File containing figleaf sections coverage info.")
-
-    parser.add_option("-v", "--verbose", action="store_true",
-                      dest="verbose")
-
-    conf_file = ConfigParser.ConfigParser()
-    conf_file.read(CONFIG_FILE)         # ignores if not present
-
-    default_coverage_file = safe_conf_get(conf_file,
-                                          'figleaf', 'coverage_file',
-                                          '.figleaf')
-    default_sections_file = safe_conf_get(conf_file,
-                                          'figleaf', 'sections_file',
-                                          '.figleaf_sections')
-    default_verbose = int(safe_conf_get(conf_file, 'figleaf', 'verbose',
-                                        0))
-
-    parser.set_defaults(coverage_file=default_coverage_file,
-                        sections_file=default_sections_file,
-                        verbose=default_verbose)
-
-def filter_coverage(coverage, re_match):
-    """
-    Filter the coverage dictionary down to the filenames matching regexp 're_match'.
-    """
-    if not re_match:
-        return coverage
-
-    regexp = re.compile(re_match)
-    
-    d = {}
-    for filename, lines in coverage.items():
-        if regexp.match(filename):
-            d[filename] = lines
-            
-    return d
-
-### commands
-
-def list(options, match=""):
-    """
-    List the filenames in the coverage file, optionally limiting it to
-    those files matching the regexp 'match'.
-    """
-    if options.verbose:
-        print>>sys.stderr, '** Reading coverage from coverage file %s' % \
-                           (options.coverage_file,)
-        if match:
-            print>>sys.stderr, '** Filtering against regexp "%s"' % (match,)
-        
-    coverage = figleaf.read_coverage(options.coverage_file)
-    coverage = filter_coverage(coverage, match)
-
-    for filename in coverage.keys():
-        print filename
-
-def list_sections(options, match=""):
-    """
-    List the filenames recorded in the sections file, optionally limiting
-    the output to those matching the regexp 'match'.
-    """
-    if options.verbose:
-        print>>sys.stderr, '** Reading sections info from sections file %s' % \
-                           (options.sections_file,)
-        if match:
-            print>>sys.stderr, '** Filtering against regexp "%s"' % (match,)
-
-    fp = open(options.sections_file)
-    figleaf.load_pickled_coverage(fp) # @CTB
-
-    data = figleaf.internals.CoverageData(figleaf._t)
-    coverage = data.gather_files()
-    coverage = filter_coverage(coverage, match)
-
-    for filename in coverage.keys():
-        print filename
-
-###
-
-def read_exclude_patterns(filename):
-    """
-    Read in exclusion patterns from a file; these are just regexps.
-    """
-    if not filename:
-        return []
-
-    exclude_patterns = []
-
-    fp = open(filename)
-    for line in fp:
-        line = line.rstrip()
-        if line and not line.startswith('#'):
-            pattern = re.compile(line)
-            exclude_patterns.append(pattern)
-
-    return exclude_patterns
-
-def read_files_list(filename):
-    """
-    Read in a list of files from a file; these are relative or absolute paths.
-    """
-    s = {}
-    for line in open(filename):
-        f = line.strip()
-        s[os.path.abspath(f)] = 1
-
-    return s
-
-def filter_files(filenames, exclude_patterns = [], files_list = {}):
-    files_list = dict(files_list)       # make copy
-
-    # list of files specified?
-    if files_list:
-        for filename in files_list.keys():
-            yield filename
-
-        filenames = [ os.path.abspath(x) for x in filenames ]
-        for filename in filenames:
-            try:
-                del files_list[filename]
-            except KeyError:
-                logger.info('SKIPPING %s -- not in files list' % (filename,))
-            
-        return
-
-    ### no files list given -- handle differently
-
-    for filename in filenames:
-        abspath = os.path.abspath(filename)
-        
-        # check to see if we match anything in the exclude_patterns list
-        skip = False
-        for pattern in exclude_patterns:
-            if pattern.search(filename):
-                logger.info('SKIPPING %s -- matches exclusion pattern' % \
-                            (filename,))
-                skip = True
-                break
-
-        if skip:
-            continue
-
-        # next, check to see if we're part of the figleaf package.
-        if thisdir in filename:
-            logger.debug('SKIPPING %s -- part of the figleaf package' % \
-                         (filename,))
-            continue
-
-        # also, check for <string> (source file from things like 'exec'):
-        if filename == '<string>':
-            continue
-
-        # miscellaneous other things: doctests
-        if filename.startswith('<doctest '):
-            continue
-
-        yield filename
-
-###
-
-def main():
-    parser = OptionParser()
-    configure(parser)
-    
-    options, args = parser.parse_args()
-
-    if not len(args):
-        print "ERROR: You must specify a command like 'list' or 'report'.  Use"
-        print "\n    %s -h\n" % (sys.argv[0],)
-        print "for help on commands and options."
-        sys.exit(-1)
-        
-    cmd = args.pop(0)
-
-    if cmd == 'list':
-        list(options, *args)
-    elif cmd == 'list_sections':
-        list_sections(options, *args)
-
-    sys.exit(0)
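
configure() above seeds the option defaults from a .figleafrc file (or whatever FIGLEAFRC points at) via safe_conf_get. An illustrative configuration covering the keys it looks up; the values here are just the built-in defaults spelled out:

    [figleaf]
    coverage_file = .figleaf
    sections_file = .figleaf_sections
    verbose = 0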

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/annotate_cover.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/annotate_cover.py b/tools/bin/ext/figleaf/annotate_cover.py
deleted file mode 100644
index ac72bfa..0000000
--- a/tools/bin/ext/figleaf/annotate_cover.py
+++ /dev/null
@@ -1,159 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import figleaf
-import os
-import re
-
-from annotate import read_exclude_patterns, filter_files, logger
-
-def report_as_cover(coverage, exclude_patterns=[], ):
-    ### now, output.
-
-    keys = coverage.keys()
-    info_dict = {}
-    
-    for k in filter_files(keys):
-        try:
-            pyfile = open(k, 'rU')
-            lines = figleaf.get_lines(pyfile)
-        except IOError:
-            logger.warning('CANNOT OPEN: %s' % k)
-            continue
-        except KeyboardInterrupt:
-            raise
-        except Exception, e:
-            logger.error('ERROR: file %s, exception %s' % (pyfile, str(e)))
-            continue
-
-        # ok, got all the info.  now annotate file ==> html.
-
-        covered = coverage[k]
-        pyfile = open(k)
-        (n_covered, n_lines, output) = make_cover_lines(lines, covered, pyfile)
-
-
-        try:
-            pcnt = n_covered * 100. / n_lines
-        except ZeroDivisionError:
-            pcnt = 100
-        info_dict[k] = (n_lines, n_covered, pcnt)
-
-        outfile = make_cover_filename(k)
-        try:
-            outfp = open(outfile, 'w')
-            outfp.write("\n".join(output))
-            outfp.write("\n")
-            outfp.close()
-        except IOError:
-            logger.warning('cannot open filename %s' % (outfile,))
-            continue
-
-        logger.info('reported on %s' % (outfile,))
-
-    ### print a summary, too.
-
-    info_dict_items = info_dict.items()
-
-    def sort_by_pcnt(a, b):
-        a = a[1][2]
-        b = b[1][2]
-
-        return -cmp(a,b)
-
-    info_dict_items.sort(sort_by_pcnt)
-
-    logger.info('reported on %d file(s) total\n' % len(info_dict))
-    return len(info_dict)
-
-def make_cover_lines(line_info, coverage_info, fp):
-    n_covered = n_lines = 0
-    output = []
-    
-    for i, line in enumerate(fp):
-        is_covered = False
-        is_line = False
-
-        i += 1
-
-        if i in coverage_info:
-            is_covered = True
-            prefix = '+'
-
-            n_covered += 1
-            n_lines += 1
-        elif i in line_info:
-            prefix = '-'
-            is_line = True
-
-            n_lines += 1
-        else:
-            prefix = '0'
-
-        line = line.rstrip()
-        output.append(prefix + ' ' + line)
-    
-    return (n_covered, n_lines, output)
-
-def make_cover_filename(orig):
-    return orig + '.cover'
-
-def main():
-    import sys
-    import logging
-    from optparse import OptionParser
-    
-    ###
-
-    option_parser = OptionParser()
-
-    option_parser.add_option('-x', '--exclude-patterns', action="store",
-                             dest="exclude_patterns_file",
-                             help="file containing regexp patterns to exclude")
-
-    option_parser.add_option('-q', '--quiet', action='store_true',
-                             dest='quiet',
-                             help='Suppress all but error messages')
-    
-    option_parser.add_option('-D', '--debug', action='store_true',
-                             dest='debug',
-                             help='Show all debugging messages')
-
-    (options, args) = option_parser.parse_args()
-
-    if options.quiet:
-        logging.disable(logging.DEBUG)
-
-    if options.debug:
-        logger.setLevel(logging.DEBUG)
-
-    ### load
-
-    if not args:
-        args = ['.figleaf']
-
-    coverage = {}
-    for filename in args:
-        logger.debug("loading coverage info from '%s'\n" % (filename,))
-        d = figleaf.read_coverage(filename)
-        coverage = figleaf.combine_coverage(coverage, d)
-
-    if not coverage:
-        logger.warning('EXITING -- no coverage info!\n')
-        sys.exit(-1)
-
-    exclude = read_exclude_patterns(options.exclude_patterns_file)
-    report_as_cover(coverage, exclude)
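
For reference, report_as_cover()/make_cover_lines() above write a plain-text '<source>.cover' file next to each source file, prefixing every line with '+' (executed), '-' (executable but never executed) or '0' (not an interesting line). A hypothetical fragment:

    + x = compute_value()
    - handle_error(x)
    0 # comment and blank lines carry the '0' prefix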

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/annotate_html.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/annotate_html.py b/tools/bin/ext/figleaf/annotate_html.py
deleted file mode 100644
index 594d07a..0000000
--- a/tools/bin/ext/figleaf/annotate_html.py
+++ /dev/null
@@ -1,292 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import figleaf
-import os
-import re
-
-# use builtin sets if in >= 2.4, otherwise use 'sets' module.
-try:
-    set()
-except NameError:
-    from sets import Set as set
-
-from figleaf.annotate import read_exclude_patterns, filter_files, logger, \
-     read_files_list
-
-###
-
-def annotate_file(fp, lines, covered):
-    # initialize
-    n_covered = n_lines = 0
-
-    output = []
-    for i, line in enumerate(fp):
-        is_covered = False
-        is_line = False
-
-        i += 1
-
-        if i in covered:
-            is_covered = True
-
-            n_covered += 1
-            n_lines += 1
-        elif i in lines:
-            is_line = True
-
-            n_lines += 1
-
-        color = 'black'
-        if is_covered:
-            color = 'green'
-        elif is_line:
-            color = 'red'
-
-        line = escape_html(line.rstrip())
-        output.append('<font color="%s">%4d. %s</font>' % (color, i, line))
-
-    try:
-        percent = n_covered * 100. / n_lines
-    except ZeroDivisionError:
-        percent = 100
-
-    return output, n_covered, n_lines, percent
-
-def write_html_summary(info_dict, directory):
-    info_dict_items = info_dict.items()
-
-    def sort_by_percent(a, b):
-        a = a[1][2]
-        b = b[1][2]
-
-        return -cmp(a,b)
-    info_dict_items.sort(sort_by_percent)
-
-    summary_lines = sum([ v[0] for (k, v) in info_dict_items])
-    summary_cover = sum([ v[1] for (k, v) in info_dict_items])
-
-    summary_percent = 100
-    if summary_lines:
-        summary_percent = float(summary_cover) * 100. / float(summary_lines)
-
-
-    percents = [ float(v[1]) * 100. / float(v[0])
-                 for (k, v) in info_dict_items if v[0] ]
-    
-    percent_90 = [ x for x in percents if x >= 90 ]
-    percent_75 = [ x for x in percents if x >= 75 ]
-    percent_50 = [ x for x in percents if x >= 50 ]
-
-    ### write out summary.
-
-    index_fp = open('%s/index.html' % (directory,), 'w')
-    index_fp.write('''
-<html>
-<title>figleaf code coverage report</title>
-<h2>Summary</h2>
-%d files total: %d files &gt; 90%%, %d files &gt; 75%%, %d files &gt; 50%%
-<p>
-<table border=1>
-<tr>
- <th>Filename</th><th># lines</th><th># covered</th><th>%% covered</th>
-</tr>
-
-<tr>
- <td><b>totals:</b></td>
- <td><b>%d</b></td>
- <td><b>%d</b></td>
- <td><b>%.1f%%</b></td>
-</tr>
-
-<tr></tr>
-
-''' % (len(percents), len(percent_90), len(percent_75), len(percent_50),
-       summary_lines, summary_cover, summary_percent,))
-
-    for filename, (n_lines, n_covered, percent_covered,) in info_dict_items:
-        html_outfile = make_html_filename(filename)
-
-        index_fp.write('''
-<tr>
- <td><a href="./%s">%s</a></td>
- <td>%d</td>
- <td>%d</td>
- <td>%.1f</td>
-</tr>
-''' % (html_outfile, filename, n_lines, n_covered, percent_covered,))
-
-    index_fp.write('</table>\n')
-    index_fp.close()
-    
-
-def report_as_html(coverage, directory, exclude_patterns, files_list):
-    """
-    Write an HTML report on all of the files, plus a summary.
-    """
-
-    ### now, output.
-
-    keys = coverage.keys()
-    info_dict = {}
-    for pyfile in filter_files(keys, exclude_patterns, files_list):
-
-        try:
-            fp = open(pyfile, 'rU')
-            lines = figleaf.get_lines(fp)
-        except KeyboardInterrupt:
-            raise
-        except IOError:
-            logger.error('CANNOT OPEN: %s' % (pyfile,))
-            continue
-        except Exception, e:
-            logger.error('ERROR: file %s, exception %s' % (pyfile, str(e)))
-            continue
-
-        #
-        # ok, we want to annotate this file.  now annotate file ==> html.
-        #
-
-        # initialize
-        covered = coverage.get(pyfile, set())
-
-        # rewind
-        fp.seek(0)
-
-        # annotate
-        output, n_covered, n_lines, percent = annotate_file(fp, lines, covered)
-
-        # summarize
-        info_dict[pyfile] = (n_lines, n_covered, percent)
-
-        # write out the file
-        html_outfile = make_html_filename(pyfile)
-        html_outfile = os.path.join(directory, html_outfile)
-        html_outfp = open(html_outfile, 'w')
-        
-        html_outfp.write('source file: <b>%s</b><br>\n' % (pyfile,))
-        html_outfp.write('''
-
-file stats: <b>%d lines, %d executed: %.1f%% covered</b>
-<pre>
-%s
-</pre>
-
-''' % (n_lines, n_covered, percent, "\n".join(output)))
-            
-        html_outfp.close()
-
-        logger.info('reported on %s' % (pyfile,))
-
-    ### print a summary, too.
-    write_html_summary(info_dict, directory)
-
-    logger.info('reported on %d file(s) total\n' % len(info_dict))
-
-def prepare_reportdir(dirname):
-    "Create output directory."
-    try:
-        os.mkdir(dirname)
-    except OSError:                         # already exists
-        pass
-
-def make_html_filename(orig):
-    "'escape' original paths into a single filename"
-
-    orig = os.path.abspath(orig)
-#    orig = os.path.basename(orig)
-    orig = os.path.splitdrive(orig)[1]
-    orig = orig.replace('_', '__')
-    orig = orig.replace(os.path.sep, '_')
-    orig += '.html'
-    return orig
-
-def escape_html(s):
-    s = s.replace("&", "&amp;")
-    s = s.replace("<", "&lt;")
-    s = s.replace(">", "&gt;")
-    s = s.replace('"', "&quot;")
-    return s
-
-def main():
-    import sys
-    import logging
-    from optparse import OptionParser
-    ###
-
-    usage = "usage: %prog [options] [coverage files ... ]"
-    option_parser = OptionParser(usage=usage)
-
-    option_parser.add_option('-x', '--exclude-patterns', action="store",
-                             dest="exclude_patterns_file",
-        help="file containing regexp patterns of files to exclude from report")
-
-    option_parser.add_option('-f', '--files-list', action="store",
-                             dest="files_list",
-                             help="file containing filenames to report on")
-
-    option_parser.add_option('-d', '--output-directory', action='store',
-                             dest="output_dir",
-                             default = "html",
-                             help="directory for HTML output")
-
-    option_parser.add_option('-q', '--quiet', action='store_true',
-                             dest='quiet',
-                             help='Suppress all but error messages')
-    
-    option_parser.add_option('-D', '--debug', action='store_true',
-                             dest='debug',
-                             help='Show all debugging messages')
-
-    (options, args) = option_parser.parse_args()
-
-    if options.quiet:
-        logging.disable(logging.DEBUG)
-        
-    if options.debug:
-        logger.setLevel(logging.DEBUG)
-
-    ### load/combine
-
-    if not args:
-        args = ['.figleaf']
-
-    coverage = {}
-    for filename in args:
-        logger.debug("loading coverage info from '%s'\n" % (filename,))
-        try:
-            d = figleaf.read_coverage(filename)
-            coverage = figleaf.combine_coverage(coverage, d)
-        except IOError:
-            logger.error("cannot open filename '%s'\n" % (filename,))
-
-    if not coverage:
-        logger.warning('EXITING -- no coverage info!\n')
-        sys.exit(-1)
-
-    exclude = []
-    if options.exclude_patterns_file:
-        exclude = read_exclude_patterns(options.exclude_patterns_file)
-
-    files_list = {}
-    if options.files_list:
-        files_list = read_files_list(options.files_list)
-
-    ### make directory
-    prepare_reportdir(options.output_dir)
-    report_as_html(coverage, options.output_dir, exclude, files_list)
-
-    print 'figleaf: HTML output written to %s' % (options.output_dir,)

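make_html_filename above doubles existing underscores before substituting path separators, so distinct source paths cannot collide once flattened into a single report filename. A minimal standalone sketch of the same scheme (the sample path is hypothetical):

    import os

    def flatten_path(orig):
        # Same scheme as make_html_filename above: drop the drive, double
        # existing underscores, then substitute path separators.
        orig = os.path.splitdrive(os.path.abspath(orig))[1]
        orig = orig.replace('_', '__')
        orig = orig.replace(os.path.sep, '_')
        return orig + '.html'

    # Hypothetical example on a POSIX system:
    #   flatten_path('/tmp/my_pkg/mod.py')  ->  '_tmp_my__pkg_mod.py.html'

Without the doubling step, paths such as a_b/c.py and a/b_c.py would flatten to the same name.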
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/annotate_sections.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/annotate_sections.py b/tools/bin/ext/figleaf/annotate_sections.py
deleted file mode 100644
index 6d7fb89..0000000
--- a/tools/bin/ext/figleaf/annotate_sections.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#! /usr/bin/env python
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import figleaf
-from figleaf import internals
-from sets import Set as set
-import sys
-from cPickle import load
-import os
-from optparse import OptionParser
-
-def main():
-    #### OPTIONS
-
-    parser = OptionParser()
-
-    parser.add_option('-c', '--coverage', nargs=1, action="store",
-                      dest="coverage_file", 
-                      help = 'load coverage info from this file',
-                      default='.figleaf_sections')
-
-    ####
-
-    (options, args) = parser.parse_args(sys.argv[1:])
-    coverage_file = options.coverage_file
-    
-    figleaf.load_pickled_coverage(open(coverage_file))
-    data = internals.CoverageData(figleaf._t)
-    full_cov = data.gather_files()
-
-    for filename in args:
-        annotate_file_with_sections(filename, data, full_cov)
-
-def annotate_file_with_sections(short, data, full_cov):
-    full = os.path.abspath(short)
-
-    tags = {}
-    sections = data.gather_sections(full)
-    sections.update(data.gather_sections(short))
-
-    print data.sections
-
-    print '*** PROCESSING:', short, '\n\t==>', short + '.sections'
-    for tag, cov in sections.items():
-        if cov:
-            tags[tag] = cov
-
-    if not tags:
-        print '*** No coverage info for file', short
-
-    tag_names = tags.keys()
-    tag_names.sort()
-    tag_names.reverse()
-
-    tags["-- all coverage --"] = full_cov.get(full, set())
-    tag_names.insert(0, "-- all coverage --")
-
-    n_tags = len(tag_names)
-    
-    fp = open(short + '.sections', 'w')
-
-    for i, tag in enumerate(tag_names):
-        fp.write('%s%s\n' % ('| ' * i, tag))
-    fp.write('| ' * n_tags)
-    fp.write('\n\n')
-
-    source = open(full)
-    for n, line in enumerate(source):
-        marks = ""
-        for tag in tag_names:
-            cov = tags[tag]
-
-            symbol = '  '
-            if (n+1) in cov:
-                symbol = '+ '
-
-            marks += symbol
-
-        fp.write('%s  | %s' % (marks, line))
-    
-    fp.close()

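The .sections layout written by annotate_file_with_sections above is easiest to see in miniature: one header line per tag, then one two-character column per tag in front of every source line ('+ ' where that tag covered the line). A small standalone sketch with hypothetical tags and source lines, mirroring the write loop above:

    import sys

    # Hypothetical section coverage for a three-line source file;
    # line numbers are 1-based, as in the loop above.
    tag_names = ["-- all coverage --", "test_foo"]
    tags = {"-- all coverage --": set([1, 2, 3]), "test_foo": set([1, 2])}
    source = ["def add(a, b):\n", "    return a + b\n", "def unused():\n"]

    fp = sys.stdout
    for i, tag in enumerate(tag_names):
        fp.write('%s%s\n' % ('| ' * i, tag))
    fp.write('| ' * len(tag_names))
    fp.write('\n\n')

    for n, line in enumerate(source):
        marks = ""
        for tag in tag_names:
            if (n + 1) in tags[tag]:
                marks += '+ '
            else:
                marks += '  '
        fp.write('%s  | %s' % (marks, line))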
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/figleaf2html
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/figleaf2html b/tools/bin/ext/figleaf/figleaf2html
deleted file mode 100755
index 58636b0..0000000
--- a/tools/bin/ext/figleaf/figleaf2html
+++ /dev/null
@@ -1,7 +0,0 @@
-#! /usr/bin/env python
-"""
-Output an HTML-ized coverage report.
-"""
-import _lib
-import figleaf.annotate_html
-figleaf.annotate_html.main()

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/internals.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/internals.py b/tools/bin/ext/figleaf/internals.py
deleted file mode 100644
index 6a25c0e..0000000
--- a/tools/bin/ext/figleaf/internals.py
+++ /dev/null
@@ -1,257 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-Coverage tracking internals.
-"""
-
-import sys
-import threading
-
-err = sys.stderr
-
-import types, symbol
-
-# use builtin sets if in >= 2.4, otherwise use 'sets' module.
-try:
-    set()
-except NameError:
-    from sets import Set as set
-
-def get_interesting_lines(code):
-    """
-    Count 'interesting' lines of Python in a code object, where
-    'interesting' is defined as 'lines that could possibly be
-    executed'.
-
-    This is done by disassembling the code object and returning
-    line numbers.
-    """
-
-    # clean up weird end-of-file issues
-
-    lines = set([ l for (o, l) in findlinestarts(code) ])
-    for const in code.co_consts:
-        if type(const) == types.CodeType:
-            lines.update(get_interesting_lines(const))
-
-    return lines
-
-def findlinestarts(code):
-    """Find the offsets in a byte code which are start of lines in the source.
-
-    Generate pairs (offset, lineno) as described in Python/compile.c.
-
-    CTB -- swiped from Python 2.5, module 'dis', so that earlier versions
-    of Python could use the function, too.
-    """
-    byte_increments = [ord(c) for c in code.co_lnotab[0::2]]
-    line_increments = [ord(c) for c in code.co_lnotab[1::2]]
-
-    lastlineno = None
-    lineno = code.co_firstlineno
-    addr = 0
-    for byte_incr, line_incr in zip(byte_increments, line_increments):
-        if byte_incr:
-            if lineno != lastlineno:
-                yield (addr, lineno)
-                lastlineno = lineno
-            addr += byte_incr
-        lineno += line_incr
-    if lineno != lastlineno:
-        yield (addr, lineno)
-
-class CodeTracer:
-    """
-    Basic mechanisms for code coverage tracking, using sys.settrace.  
-    """
-    def __init__(self, exclude_prefix, include_only_prefix):
-        self.common = self.c = set()
-        self.section_name = None
-        self.sections = {}
-        
-        self.started = False
-
-        assert not (exclude_prefix and include_only_prefix), \
-               "mutually exclusive"
-        
-        self.excl = exclude_prefix
-        self.incl = include_only_prefix
-
-    def start(self):
-        """
-        Start recording.
-        """
-        if not self.started:
-            self.started = True
-
-            if self.excl and not self.incl:
-                global_trace_fn = self.g1
-            elif self.incl and not self.excl:
-                global_trace_fn = self.g2
-            else:
-                global_trace_fn = self.g0
-
-            sys.settrace(global_trace_fn)
-
-            if hasattr(threading, 'settrace'):
-                threading.settrace(global_trace_fn)
-
-    def stop(self):
-        if self.started:
-            sys.settrace(None)
-            
-            if hasattr(threading, 'settrace'):
-                threading.settrace(None)
-
-            self.started = False
-            self.stop_section()
-
-    def g0(self, f, e, a):
-        """
-        global trace function, no exclude/include info.
-
-        f == frame, e == event, a == arg.
-        """
-        if e == 'call':
-            return self.t
-
-    def g1(self, f, e, a):
-        """
-        global trace function like g0, but ignores files starting with
-        'self.excl'.
-        """
-        if e == 'call':
-            excl = self.excl
-            path = f.f_globals.get('__file__')
-            if path is None:
-                path = f.f_code.co_filename
-
-            if excl and path.startswith(excl):
-                return
-
-            return self.t
-
-    def g2(self, f, e, a):
-        """
-        global trace function like g0, but only records files starting with
-        'self.incl'.
-        """
-        if e == 'call':
-            incl = self.incl
-            if incl and f.f_code.co_filename.startswith(incl):
-                return self.t
-
-    def t(self, f, e, a):
-        """
-        local trace function.
-        """
-        if e == 'line':
-            self.c.add((f.f_code.co_filename, f.f_lineno))
-        return self.t
-
-    def clear(self):
-        """
-        wipe out coverage info
-        """
-
-        self.c = set()
-
-    def start_section(self, name):
-        self.stop_section()
-
-        self.section_name = name
-        self.c = self.sections.get(name, set())
-        
-    def stop_section(self):
-        if self.section_name:
-            self.sections[self.section_name] = self.c
-            self.section_name = None
-            self.c = self.common
-
-class CoverageData:
-    """
-    A class to manipulate and combine data from the CodeTracer object.
-
-    In general, do not pickle this object; it's simpler and more
-    straightforward to just pass the basic Python objects around
-    (e.g. CoverageData.common, a set, and CoverageData.sections, a
-    dictionary of sets).
-    """
-    def __init__(self, trace_obj=None):
-        self.common = set()
-        self.sections = {}
-        
-        if trace_obj:
-            self.update(trace_obj)
-            
-    def update(self, trace_obj):
-        # transfer common-block code coverage -- if no sections are set,
-        # this will be all of the code coverage info.
-        self.common.update(trace_obj.common)
-
-        # update our internal section dictionary with the (filename, line_no)
-        # pairs from the section coverage as well.
-        
-        for section_name, section_d in trace_obj.sections.items():
-            section_set = self.sections.get(section_name, set())
-            section_set.update(section_d)
-            self.sections[section_name] = section_set
-
-    def gather_files(self, name=None):
-        """
-        Return the dictionary of lines of executed code; the dict
-        keys are filenames and values are sets containing individual
-        (integer) line numbers.
-        
-        'name', if set, is the desired section name from which to gather
-        coverage info.
-        """
-        cov = set()
-        cov.update(self.common)
-
-        if name is None:
-            for section_name, coverage_set in self.sections.items():
-                cov.update(coverage_set)
-        else:
-            coverage_set = self.sections.get(name, set())
-            cov.update(coverage_set)
-            
-#        cov = list(cov)
-#        cov.sort()
-
-        files = {}
-        for (filename, line) in cov:    # @CTB could optimize
-            d = files.get(filename, set())
-            d.add(line)
-            files[filename] = d
-
-        return files
-
-    def gather_sections(self, file):
-        """
-        Return a dictionary of sets containing section coverage information for
-        a specific file.  Dict keys are sections, and the dict values are
-        sets containing (integer) line numbers.
-        """
-        sections = {}
-        for k, c in self.sections.items():
-            s = set()
-            for (filename, line) in c:
-                if filename == file:
-                    s.add(line)
-            sections[k] = s
-        return sections

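CodeTracer above relies on the standard sys.settrace protocol: the global trace function fires on 'call' events and returns a local trace function, which then records a (filename, lineno) pair on every 'line' event in that frame. A minimal standalone sketch of that pattern (the traced demo() function is hypothetical):

    import sys

    covered = set()

    def local_trace(frame, event, arg):
        # Record each executed line of the traced frame.
        if event == 'line':
            covered.add((frame.f_code.co_filename, frame.f_lineno))
        return local_trace

    def global_trace(frame, event, arg):
        # Called once per new frame; returning local_trace enables
        # per-line tracing for that frame.
        if event == 'call':
            return local_trace

    def demo():          # hypothetical function to trace
        x = 1
        y = x + 1
        return y

    sys.settrace(global_trace)
    try:
        demo()
    finally:
        sys.settrace(None)

    for filename, lineno in sorted(covered):
        sys.stdout.write('%s:%d\n' % (filename, lineno))

The g1/g2 variants above differ from this only in filtering frames by filename prefix before returning the local trace function.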
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/figleaf/nose_sections.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/figleaf/nose_sections.py b/tools/bin/ext/figleaf/nose_sections.py
deleted file mode 100644
index 09654a5..0000000
--- a/tools/bin/ext/figleaf/nose_sections.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-figleafsections plugin for nose.
-
-Automatically records coverage info for Python tests and connects it with
-which test was being run at the time.  Can be used to produce a "barcode"
-of code execution.
-"""
-
-DEFAULT_COVERAGE_FILE='.figleaf_sections'
-import pkg_resources
-
-try:
-    pkg_resources.require('figleaf>=0.6.1')
-    import figleaf
-except ImportError:
-    figleaf = None
-
-import sys
-err = sys.stderr
-
-import nose.case
-from nose.plugins.base import Plugin
-
-import logging
-import os
-
-log = logging.getLogger(__name__)
-
-def calc_testname(test):
-    """
-    Build a reasonably human-readable testname from each test.
-    """
-    name = str(test)
-    if ' ' in name:
-        name = name.split(' ')[1]
-
-    return name
-
-class FigleafSections(Plugin):
-    def __init__(self):
-        self.name = 'figleafsections'
-        Plugin.__init__(self)
-        self.testname = None
-
-    def add_options(self, parser, env=os.environ):
-        env_opt = 'NOSE_WITH_%s' % self.name.upper()
-        env_opt = env_opt.replace('-', '_')
-        parser.add_option("--with-%s" % self.name,
-                          action="store_true",
-                          dest=self.enableOpt,
-                          default=env.get(env_opt),
-                          help="Enable plugin %s: %s [%s]" %
-                          (self.__class__.__name__, self.help(), env_opt))
-
-        parser.add_option("--figleaf-file",
-                          action="store",
-                          dest="figleaf_file",
-                          default=None,
-                          help="Store figleaf section coverage in this file")
-        
-    def configure(self, options, config):
-        """
-        Configure: enable plugin?  And if so, where should the coverage
-        info be placed?
-        """
-        self.conf = config
-
-        # enable?
-        if hasattr(options, self.enableOpt):
-            self.enabled = getattr(options, self.enableOpt)
-
-        ### save coverage file name, if given.
-        if options.figleaf_file:
-            self.figleaf_file = options.figleaf_file
-        else:
-            self.figleaf_file = DEFAULT_COVERAGE_FILE
-
-        if self.enabled and figleaf is None:
-            raise Exception("You must install figleaf 0.6.1 before you can use the figleafsections plugin! See http://darcs.idyll.org/~t/projects/figleaf/doc/")
-
-    def begin(self):
-        """
-        Initialize: start recording coverage info.
-        """
-        figleaf.start()
-
-    def finalize(self, result):
-        """
-        Finalize: stop recording coverage info, save & exit.
-        """
-        figleaf.stop()
-        
-        fp = open(self.figleaf_file, 'w')
-        figleaf.dump_pickled_coverage(fp)
-        fp.close()
-
-    def startTest(self, test):
-        """
-        Run at the beginning of each test, before per-test fixtures.
-
-        One weakness is that this is only run for specific kinds of
-        nose testcases.
-        """
-        if isinstance(test, nose.case.Test):
-           
-            self.testname = calc_testname(test)
-            assert self.testname
-
-            figleaf.start_section(self.testname)
-
-    def stopTest(self, test):
-        """
-        Run at the end of each test, after per-test fixtures.
-        """
-        if self.testname:
-            figleaf.stop_section()
-            self.testname = None

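The plugin above is a thin adapter between nose's test lifecycle and figleaf's section API. A minimal sketch of the same sequence of calls, assuming the figleaf package removed in this commit (or upstream figleaf 0.6.1) is importable; the test names and the run_one() helper are hypothetical:

    import figleaf

    def run_one(name):
        # Hypothetical stand-in for a test body.
        if name == 'test_sum':
            return sum(range(10))
        return len(name)

    figleaf.start()                       # what begin() does
    for testname in ['test_sum', 'test_len']:
        figleaf.start_section(testname)   # what startTest() does
        run_one(testname)
        figleaf.stop_section()            # what stopTest() does
    figleaf.stop()                        # what finalize() does

    fp = open('.figleaf_sections', 'w')
    figleaf.dump_pickled_coverage(fp)     # same dump call as finalize()
    fp.close()

The resulting .figleaf_sections file is the input that annotate_sections.py above loads via load_pickled_coverage.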
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/0672292f/tools/bin/ext/pg8000/__init__.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/pg8000/__init__.py b/tools/bin/ext/pg8000/__init__.py
deleted file mode 100644
index 57de8e8..0000000
--- a/tools/bin/ext/pg8000/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# vim: sw=4:expandtab:foldmethod=marker
-#
-# Copyright (c) 2007-2009, Mathieu Fenniak
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-# * The name of the author may not be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-# POSSIBILITY OF SUCH DAMAGE.
-
-__author__ = "Mathieu Fenniak"
-
-import dbapi as DBAPI
-pg8000_dbapi = DBAPI
-
-from interface import *
-from types import Bytea
-