Posted to commits@hawq.apache.org by rv...@apache.org on 2015/09/22 21:14:13 UTC

[10/35] incubator-hawq git commit: SGA import. Now with files previously missing because of the .gitignore issue

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/__init__.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/__init__.py b/tools/bin/ext/yaml/__init__.py
new file mode 100644
index 0000000..bd233a8
--- /dev/null
+++ b/tools/bin/ext/yaml/__init__.py
@@ -0,0 +1,290 @@
+
+from error import *
+
+from tokens import *
+from events import *
+from nodes import *
+
+from loader import *
+from dumper import *
+
+try:
+    from cyaml import *
+except ImportError:
+    pass
+
+def scan(stream, Loader=Loader):
+    """
+    Scan a YAML stream and produce scanning tokens.
+    """
+    loader = Loader(stream)
+    while loader.check_token():
+        yield loader.get_token()
+
+def parse(stream, Loader=Loader):
+    """
+    Parse a YAML stream and produce parsing events.
+    """
+    loader = Loader(stream)
+    while loader.check_event():
+        yield loader.get_event()
+
+def compose(stream, Loader=Loader):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding representation tree.
+    """
+    loader = Loader(stream)
+    if loader.check_node():
+        return loader.get_node()
+
+def compose_all(stream, Loader=Loader):
+    """
+    Parse all YAML documents in a stream
+    and produce corresponding representation trees.
+    """
+    loader = Loader(stream)
+    while loader.check_node():
+        yield loader.get_node()
+
+def load_all(stream, Loader=Loader):
+    """
+    Parse all YAML documents in a stream
+    and produce corresponding Python objects.
+    """
+    loader = Loader(stream)
+    while loader.check_data():
+        yield loader.get_data()
+
+def load(stream, Loader=Loader):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding Python object.
+    """
+    loader = Loader(stream)
+    if loader.check_data():
+        return loader.get_data()
+
+def safe_load_all(stream):
+    """
+    Parse all YAML documents in a stream
+    and produce corresponding Python objects.
+    Resolve only basic YAML tags.
+    """
+    return load_all(stream, SafeLoader)
+
+def safe_load(stream):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding Python object.
+    Resolve only basic YAML tags.
+    """
+    return load(stream, SafeLoader)
+
+def emit(events, stream=None, Dumper=Dumper,
+        canonical=None, indent=None, width=None,
+        allow_unicode=None, line_break=None):
+    """
+    Emit YAML parsing events into a stream.
+    If stream is None, return the produced string instead.
+    """
+    getvalue = None
+    if stream is None:
+        try:
+            from cStringIO import StringIO
+        except ImportError:
+            from StringIO import StringIO
+        stream = StringIO()
+        getvalue = stream.getvalue
+    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+            allow_unicode=allow_unicode, line_break=line_break)
+    for event in events:
+        dumper.emit(event)
+    if getvalue:
+        return getvalue()
+
+def serialize_all(nodes, stream=None, Dumper=Dumper,
+        canonical=None, indent=None, width=None,
+        allow_unicode=None, line_break=None,
+        encoding='utf-8', explicit_start=None, explicit_end=None,
+        version=None, tags=None):
+    """
+    Serialize a sequence of representation trees into a YAML stream.
+    If stream is None, return the produced string instead.
+    """
+    getvalue = None
+    if stream is None:
+        try:
+            from cStringIO import StringIO
+        except ImportError:
+            from StringIO import StringIO
+        stream = StringIO()
+        getvalue = stream.getvalue
+    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+            allow_unicode=allow_unicode, line_break=line_break,
+            encoding=encoding, version=version, tags=tags,
+            explicit_start=explicit_start, explicit_end=explicit_end)
+    dumper.open()
+    for node in nodes:
+        dumper.serialize(node)
+    dumper.close()
+    if getvalue:
+        return getvalue()
+
+def serialize(node, stream=None, Dumper=Dumper, **kwds):
+    """
+    Serialize a representation tree into a YAML stream.
+    If stream is None, return the produced string instead.
+    """
+    return serialize_all([node], stream, Dumper=Dumper, **kwds)
+
+def dump_all(documents, stream=None, Dumper=Dumper,
+        default_style=None, default_flow_style=None,
+        canonical=None, indent=None, width=None,
+        allow_unicode=None, line_break=None,
+        encoding='utf-8', explicit_start=None, explicit_end=None,
+        version=None, tags=None):
+    """
+    Serialize a sequence of Python objects into a YAML stream.
+    If stream is None, return the produced string instead.
+    """
+    getvalue = None
+    if stream is None:
+        try:
+            from cStringIO import StringIO
+        except ImportError:
+            from StringIO import StringIO
+        stream = StringIO()
+        getvalue = stream.getvalue
+    dumper = Dumper(stream, default_style=default_style,
+            default_flow_style=default_flow_style,
+            canonical=canonical, indent=indent, width=width,
+            allow_unicode=allow_unicode, line_break=line_break,
+            encoding=encoding, version=version, tags=tags,
+            explicit_start=explicit_start, explicit_end=explicit_end)
+    dumper.open()
+    for data in documents:
+        dumper.represent(data)
+    dumper.close()
+    if getvalue:
+        return getvalue()
+
+def dump(data, stream=None, Dumper=Dumper, **kwds):
+    """
+    Serialize a Python object into a YAML stream.
+    If stream is None, return the produced string instead.
+    """
+    return dump_all([data], stream, Dumper=Dumper, **kwds)
+
+def safe_dump_all(documents, stream=None, **kwds):
+    """
+    Serialize a sequence of Python objects into a YAML stream.
+    Produce only basic YAML tags.
+    If stream is None, return the produced string instead.
+    """
+    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
+
+def safe_dump(data, stream=None, **kwds):
+    """
+    Serialize a Python object into a YAML stream.
+    Produce only basic YAML tags.
+    If stream is None, return the produced string instead.
+    """
+    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
+
+def add_implicit_resolver(tag, regexp, first=None,
+        Loader=Loader, Dumper=Dumper):
+    """
+    Add an implicit scalar detector.
+    If an implicit scalar value matches the given regexp,
+    the corresponding tag is assigned to the scalar.
+    first is a sequence of possible initial characters or None.
+    """
+    Loader.add_implicit_resolver(tag, regexp, first)
+    Dumper.add_implicit_resolver(tag, regexp, first)
+
+def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
+    """
+    Add a path based resolver for the given tag.
+    A path is a list of keys that forms a path
+    to a node in the representation tree.
+    Keys can be string values, integers, or None.
+    """
+    Loader.add_path_resolver(tag, path, kind)
+    Dumper.add_path_resolver(tag, path, kind)
+
+def add_constructor(tag, constructor, Loader=Loader):
+    """
+    Add a constructor for the given tag.
+    Constructor is a function that accepts a Loader instance
+    and a node object and produces the corresponding Python object.
+    """
+    Loader.add_constructor(tag, constructor)
+
+def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
+    """
+    Add a multi-constructor for the given tag prefix.
+    Multi-constructor is called for a node if its tag starts with tag_prefix.
+    Multi-constructor accepts a Loader instance, a tag suffix,
+    and a node object and produces the corresponding Python object.
+    """
+    Loader.add_multi_constructor(tag_prefix, multi_constructor)
+
+def add_representer(data_type, representer, Dumper=Dumper):
+    """
+    Add a representer for the given type.
+    Representer is a function accepting a Dumper instance
+    and an instance of the given data type
+    and producing the corresponding representation node.
+    """
+    Dumper.add_representer(data_type, representer)
+
+def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
+    """
+    Add a representer for the given type.
+    Multi-representer is a function accepting a Dumper instance
+    and an instance of the given data type or subtype
+    and producing the corresponding representation node.
+    """
+    Dumper.add_multi_representer(data_type, multi_representer)
+
+class YAMLObjectMetaclass(type):
+    """
+    The metaclass for YAMLObject.
+    """
+    def __init__(cls, name, bases, kwds):
+        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
+            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+            cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+
+class YAMLObject(object):
+    """
+    An object that can dump itself to a YAML stream
+    and load itself from a YAML stream.
+    """
+
+    __metaclass__ = YAMLObjectMetaclass
+    __slots__ = ()  # no direct instantiation, so allow immutable subclasses
+
+    yaml_loader = Loader
+    yaml_dumper = Dumper
+
+    yaml_tag = None
+    yaml_flow_style = None
+
+    def from_yaml(cls, loader, node):
+        """
+        Convert a representation node to a Python object.
+        """
+        return loader.construct_yaml_object(node, cls)
+    from_yaml = classmethod(from_yaml)
+
+    def to_yaml(cls, dumper, data):
+        """
+        Convert a Python object to a representation node.
+        """
+        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
+                flow_style=cls.yaml_flow_style)
+    to_yaml = classmethod(to_yaml)
+
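
For reviewers who have not used the vendored library before, here is a
minimal usage sketch of the top-level API defined in this __init__.py
(it assumes tools/bin/ext is on sys.path so the package imports as
"yaml"; the data values are made up for illustration):

    import yaml

    # The safe_* variants resolve only the basic YAML tags.
    text = yaml.safe_dump({'segments': 4, 'hosts': ['sdw1', 'sdw2']})
    config = yaml.safe_load(text)

    # load()/dump() use the full Loader/Dumper, which also understand the
    # python/* tags registered later in constructor.py.
    print yaml.dump(config, default_flow_style=False)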

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/__init__.pyc
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/__init__.pyc b/tools/bin/ext/yaml/__init__.pyc
new file mode 100644
index 0000000..003ad21
Binary files /dev/null and b/tools/bin/ext/yaml/__init__.pyc differ

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/composer.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/composer.py b/tools/bin/ext/yaml/composer.py
new file mode 100644
index 0000000..9f5cd87
--- /dev/null
+++ b/tools/bin/ext/yaml/composer.py
@@ -0,0 +1,118 @@
+
+__all__ = ['Composer', 'ComposerError']
+
+from error import MarkedYAMLError
+from events import *
+from nodes import *
+
+class ComposerError(MarkedYAMLError):
+    pass
+
+class Composer(object):
+
+    def __init__(self):
+        self.anchors = {}
+
+    def check_node(self):
+        # Drop the STREAM-START event.
+        if self.check_event(StreamStartEvent):
+            self.get_event()
+
+        # Are there more documents available?
+        return not self.check_event(StreamEndEvent)
+
+    def get_node(self):
+        # Get the root node of the next document.
+        if not self.check_event(StreamEndEvent):
+            return self.compose_document()
+
+    def compose_document(self):
+        # Drop the DOCUMENT-START event.
+        self.get_event()
+
+        # Compose the root node.
+        node = self.compose_node(None, None)
+
+        # Drop the DOCUMENT-END event.
+        self.get_event()
+
+        self.anchors = {}
+        return node
+
+    def compose_node(self, parent, index):
+        if self.check_event(AliasEvent):
+            event = self.get_event()
+            anchor = event.anchor
+            if anchor not in self.anchors:
+                raise ComposerError(None, None, "found undefined alias %r"
+                        % anchor.encode('utf-8'), event.start_mark)
+            return self.anchors[anchor]
+        event = self.peek_event()
+        anchor = event.anchor
+        if anchor is not None:
+            if anchor in self.anchors:
+                raise ComposerError("found duplicate anchor %r; first occurence"
+                        % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
+                        "second occurence", event.start_mark)
+        self.descend_resolver(parent, index)
+        if self.check_event(ScalarEvent):
+            node = self.compose_scalar_node(anchor)
+        elif self.check_event(SequenceStartEvent):
+            node = self.compose_sequence_node(anchor)
+        elif self.check_event(MappingStartEvent):
+            node = self.compose_mapping_node(anchor)
+        self.ascend_resolver()
+        return node
+
+    def compose_scalar_node(self, anchor):
+        event = self.get_event()
+        tag = event.tag
+        if tag is None or tag == u'!':
+            tag = self.resolve(ScalarNode, event.value, event.implicit)
+        node = ScalarNode(tag, event.value,
+                event.start_mark, event.end_mark, style=event.style)
+        if anchor is not None:
+            self.anchors[anchor] = node
+        return node
+
+    def compose_sequence_node(self, anchor):
+        start_event = self.get_event()
+        tag = start_event.tag
+        if tag is None or tag == u'!':
+            tag = self.resolve(SequenceNode, None, start_event.implicit)
+        node = SequenceNode(tag, [],
+                start_event.start_mark, None,
+                flow_style=start_event.flow_style)
+        if anchor is not None:
+            self.anchors[anchor] = node
+        index = 0
+        while not self.check_event(SequenceEndEvent):
+            node.value.append(self.compose_node(node, index))
+            index += 1
+        end_event = self.get_event()
+        node.end_mark = end_event.end_mark
+        return node
+
+    def compose_mapping_node(self, anchor):
+        start_event = self.get_event()
+        tag = start_event.tag
+        if tag is None or tag == u'!':
+            tag = self.resolve(MappingNode, None, start_event.implicit)
+        node = MappingNode(tag, [],
+                start_event.start_mark, None,
+                flow_style=start_event.flow_style)
+        if anchor is not None:
+            self.anchors[anchor] = node
+        while not self.check_event(MappingEndEvent):
+            #key_event = self.peek_event()
+            item_key = self.compose_node(node, None)
+            #if item_key in node.value:
+            #    raise ComposerError("while composing a mapping", start_event.start_mark,
+            #            "found duplicate key", key_event.start_mark)
+            item_value = self.compose_node(node, item_key)
+            #node.value[item_key] = item_value
+            node.value.append((item_key, item_value))
+        end_event = self.get_event()
+        node.end_mark = end_event.end_mark
+        return node
+
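
As a quick illustration of what the Composer above produces: anchors and
aliases resolve to the same node object in the representation tree. A
small sketch (the sample document is made up):

    import yaml

    node = yaml.compose("- &a {x: 1}\n- *a\n")
    # compose_node() returns the previously anchored node for the *a alias,
    # so both sequence entries are literally the same MappingNode.
    assert node.value[0] is node.value[1]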

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/composer.pyc
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/composer.pyc b/tools/bin/ext/yaml/composer.pyc
new file mode 100644
index 0000000..ea754d3
Binary files /dev/null and b/tools/bin/ext/yaml/composer.pyc differ

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/constructor.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/constructor.py b/tools/bin/ext/yaml/constructor.py
new file mode 100644
index 0000000..a1295c8
--- /dev/null
+++ b/tools/bin/ext/yaml/constructor.py
@@ -0,0 +1,675 @@
+
+__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
+    'ConstructorError']
+
+from error import *
+from nodes import *
+
+import datetime
+
+try:
+    set
+except NameError:
+    from sets import Set as set
+
+import binascii, re, sys, types
+
+class ConstructorError(MarkedYAMLError):
+    pass
+
+class BaseConstructor(object):
+
+    yaml_constructors = {}
+    yaml_multi_constructors = {}
+
+    def __init__(self):
+        self.constructed_objects = {}
+        self.recursive_objects = {}
+        self.state_generators = []
+        self.deep_construct = False
+
+    def check_data(self):
+        # Are there more documents available?
+        return self.check_node()
+
+    def get_data(self):
+        # Construct and return the next document.
+        if self.check_node():
+            return self.construct_document(self.get_node())
+
+    def construct_document(self, node):
+        data = self.construct_object(node)
+        while self.state_generators:
+            state_generators = self.state_generators
+            self.state_generators = []
+            for generator in state_generators:
+                for dummy in generator:
+                    pass
+        self.constructed_objects = {}
+        self.recursive_objects = {}
+        self.deep_construct = False
+        return data
+
+    def construct_object(self, node, deep=False):
+        if deep:
+            old_deep = self.deep_construct
+            self.deep_construct = True
+        if node in self.constructed_objects:
+            return self.constructed_objects[node]
+        if node in self.recursive_objects:
+            raise ConstructorError(None, None,
+                    "found unconstructable recursive node", node.start_mark)
+        self.recursive_objects[node] = None
+        constructor = None
+        state_constructor = None
+        tag_suffix = None
+        if node.tag in self.yaml_constructors:
+            constructor = self.yaml_constructors[node.tag]
+        else:
+            for tag_prefix in self.yaml_multi_constructors:
+                if node.tag.startswith(tag_prefix):
+                    tag_suffix = node.tag[len(tag_prefix):]
+                    constructor = self.yaml_multi_constructors[tag_prefix]
+                    break
+            else:
+                if None in self.yaml_multi_constructors:
+                    tag_suffix = node.tag
+                    constructor = self.yaml_multi_constructors[None]
+                elif None in self.yaml_constructors:
+                    constructor = self.yaml_constructors[None]
+                elif isinstance(node, ScalarNode):
+                    constructor = self.__class__.construct_scalar
+                elif isinstance(node, SequenceNode):
+                    constructor = self.__class__.construct_sequence
+                elif isinstance(node, MappingNode):
+                    constructor = self.__class__.construct_mapping
+        if tag_suffix is None:
+            data = constructor(self, node)
+        else:
+            data = constructor(self, tag_suffix, node)
+        if isinstance(data, types.GeneratorType):
+            generator = data
+            data = generator.next()
+            if self.deep_construct:
+                for dummy in generator:
+                    pass
+            else:
+                self.state_generators.append(generator)
+        self.constructed_objects[node] = data
+        del self.recursive_objects[node]
+        if deep:
+            self.deep_construct = old_deep
+        return data
+
+    def construct_scalar(self, node):
+        if not isinstance(node, ScalarNode):
+            raise ConstructorError(None, None,
+                    "expected a scalar node, but found %s" % node.id,
+                    node.start_mark)
+        return node.value
+
+    def construct_sequence(self, node, deep=False):
+        if not isinstance(node, SequenceNode):
+            raise ConstructorError(None, None,
+                    "expected a sequence node, but found %s" % node.id,
+                    node.start_mark)
+        return [self.construct_object(child, deep=deep)
+                for child in node.value]
+
+    def construct_mapping(self, node, deep=False):
+        if not isinstance(node, MappingNode):
+            raise ConstructorError(None, None,
+                    "expected a mapping node, but found %s" % node.id,
+                    node.start_mark)
+        mapping = {}
+        for key_node, value_node in node.value:
+            key = self.construct_object(key_node, deep=deep)
+            try:
+                hash(key)
+            except TypeError, exc:
+                raise ConstructorError("while constructing a mapping", node.start_mark,
+                        "found unacceptable key (%s)" % exc, key_node.start_mark)
+            value = self.construct_object(value_node, deep=deep)
+            mapping[key] = value
+        return mapping
+
+    def construct_pairs(self, node, deep=False):
+        if not isinstance(node, MappingNode):
+            raise ConstructorError(None, None,
+                    "expected a mapping node, but found %s" % node.id,
+                    node.start_mark)
+        pairs = []
+        for key_node, value_node in node.value:
+            key = self.construct_object(key_node, deep=deep)
+            value = self.construct_object(value_node, deep=deep)
+            pairs.append((key, value))
+        return pairs
+
+    def add_constructor(cls, tag, constructor):
+        if not 'yaml_constructors' in cls.__dict__:
+            cls.yaml_constructors = cls.yaml_constructors.copy()
+        cls.yaml_constructors[tag] = constructor
+    add_constructor = classmethod(add_constructor)
+
+    def add_multi_constructor(cls, tag_prefix, multi_constructor):
+        if not 'yaml_multi_constructors' in cls.__dict__:
+            cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
+        cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+    add_multi_constructor = classmethod(add_multi_constructor)
+
+class SafeConstructor(BaseConstructor):
+
+    def construct_scalar(self, node):
+        if isinstance(node, MappingNode):
+            for key_node, value_node in node.value:
+                if key_node.tag == u'tag:yaml.org,2002:value':
+                    return self.construct_scalar(value_node)
+        return BaseConstructor.construct_scalar(self, node)
+
+    def flatten_mapping(self, node):
+        merge = []
+        index = 0
+        while index < len(node.value):
+            key_node, value_node = node.value[index]
+            if key_node.tag == u'tag:yaml.org,2002:merge':
+                del node.value[index]
+                if isinstance(value_node, MappingNode):
+                    self.flatten_mapping(value_node)
+                    merge.extend(value_node.value)
+                elif isinstance(value_node, SequenceNode):
+                    submerge = []
+                    for subnode in value_node.value:
+                        if not isinstance(subnode, MappingNode):
+                            raise ConstructorError("while constructing a mapping",
+                                    node.start_mark,
+                                    "expected a mapping for merging, but found %s"
+                                    % subnode.id, subnode.start_mark)
+                        self.flatten_mapping(subnode)
+                        submerge.append(subnode.value)
+                    submerge.reverse()
+                    for value in submerge:
+                        merge.extend(value)
+                else:
+                    raise ConstructorError("while constructing a mapping", node.start_mark,
+                            "expected a mapping or list of mappings for merging, but found %s"
+                            % value_node.id, value_node.start_mark)
+            elif key_node.tag == u'tag:yaml.org,2002:value':
+                key_node.tag = u'tag:yaml.org,2002:str'
+                index += 1
+            else:
+                index += 1
+        if merge:
+            node.value = merge + node.value
+
+    def construct_mapping(self, node, deep=False):
+        if isinstance(node, MappingNode):
+            self.flatten_mapping(node)
+        return BaseConstructor.construct_mapping(self, node, deep=deep)
+
+    def construct_yaml_null(self, node):
+        self.construct_scalar(node)
+        return None
+
+    bool_values = {
+        u'yes':     True,
+        u'no':      False,
+        u'true':    True,
+        u'false':   False,
+        u'on':      True,
+        u'off':     False,
+    }
+
+    def construct_yaml_bool(self, node):
+        value = self.construct_scalar(node)
+        return self.bool_values[value.lower()]
+
+    def construct_yaml_int(self, node):
+        value = str(self.construct_scalar(node))
+        value = value.replace('_', '')
+        sign = +1
+        if value[0] == '-':
+            sign = -1
+        if value[0] in '+-':
+            value = value[1:]
+        if value == '0':
+            return 0
+        elif value.startswith('0b'):
+            return sign*int(value[2:], 2)
+        elif value.startswith('0x'):
+            return sign*int(value[2:], 16)
+        elif value[0] == '0':
+            return sign*int(value, 8)
+        elif ':' in value:
+            digits = [int(part) for part in value.split(':')]
+            digits.reverse()
+            base = 1
+            value = 0
+            for digit in digits:
+                value += digit*base
+                base *= 60
+            return sign*value
+        else:
+            return sign*int(value)
+
+    inf_value = 1e300
+    while inf_value != inf_value*inf_value:
+        inf_value *= inf_value
+    nan_value = -inf_value/inf_value   # Trying to make a quiet NaN (like C99).
+
+    def construct_yaml_float(self, node):
+        value = str(self.construct_scalar(node))
+        value = value.replace('_', '').lower()
+        sign = +1
+        if value[0] == '-':
+            sign = -1
+        if value[0] in '+-':
+            value = value[1:]
+        if value == '.inf':
+            return sign*self.inf_value
+        elif value == '.nan':
+            return self.nan_value
+        elif ':' in value:
+            digits = [float(part) for part in value.split(':')]
+            digits.reverse()
+            base = 1
+            value = 0.0
+            for digit in digits:
+                value += digit*base
+                base *= 60
+            return sign*value
+        else:
+            return sign*float(value)
+
+    def construct_yaml_binary(self, node):
+        value = self.construct_scalar(node)
+        try:
+            return str(value).decode('base64')
+        except (binascii.Error, UnicodeEncodeError), exc:
+            raise ConstructorError(None, None,
+                    "failed to decode base64 data: %s" % exc, node.start_mark) 
+
+    timestamp_regexp = re.compile(
+            ur'''^(?P<year>[0-9][0-9][0-9][0-9])
+                -(?P<month>[0-9][0-9]?)
+                -(?P<day>[0-9][0-9]?)
+                (?:(?:[Tt]|[ \t]+)
+                (?P<hour>[0-9][0-9]?)
+                :(?P<minute>[0-9][0-9])
+                :(?P<second>[0-9][0-9])
+                (?:\.(?P<fraction>[0-9]*))?
+                (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
+                (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
+
+    def construct_yaml_timestamp(self, node):
+        value = self.construct_scalar(node)
+        match = self.timestamp_regexp.match(node.value)
+        values = match.groupdict()
+        year = int(values['year'])
+        month = int(values['month'])
+        day = int(values['day'])
+        if not values['hour']:
+            return datetime.date(year, month, day)
+        hour = int(values['hour'])
+        minute = int(values['minute'])
+        second = int(values['second'])
+        fraction = 0
+        if values['fraction']:
+            fraction = int(values['fraction'][:6].ljust(6, '0'))
+        delta = None
+        if values['tz_sign']:
+            tz_hour = int(values['tz_hour'])
+            tz_minute = int(values['tz_minute'] or 0)
+            delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
+            if values['tz_sign'] == '-':
+                delta = -delta
+        data = datetime.datetime(year, month, day, hour, minute, second, fraction)
+        if delta:
+            data -= delta
+        return data
+
+    def construct_yaml_omap(self, node):
+        # Note: we do not check for duplicate keys, because it's too
+        # CPU-expensive.
+        omap = []
+        yield omap
+        if not isinstance(node, SequenceNode):
+            raise ConstructorError("while constructing an ordered map", node.start_mark,
+                    "expected a sequence, but found %s" % node.id, node.start_mark)
+        for subnode in node.value:
+            if not isinstance(subnode, MappingNode):
+                raise ConstructorError("while constructing an ordered map", node.start_mark,
+                        "expected a mapping of length 1, but found %s" % subnode.id,
+                        subnode.start_mark)
+            if len(subnode.value) != 1:
+                raise ConstructorError("while constructing an ordered map", node.start_mark,
+                        "expected a single mapping item, but found %d items" % len(subnode.value),
+                        subnode.start_mark)
+            key_node, value_node = subnode.value[0]
+            key = self.construct_object(key_node)
+            value = self.construct_object(value_node)
+            omap.append((key, value))
+
+    def construct_yaml_pairs(self, node):
+        # Note: the same code as `construct_yaml_omap`.
+        pairs = []
+        yield pairs
+        if not isinstance(node, SequenceNode):
+            raise ConstructorError("while constructing pairs", node.start_mark,
+                    "expected a sequence, but found %s" % node.id, node.start_mark)
+        for subnode in node.value:
+            if not isinstance(subnode, MappingNode):
+                raise ConstructorError("while constructing pairs", node.start_mark,
+                        "expected a mapping of length 1, but found %s" % subnode.id,
+                        subnode.start_mark)
+            if len(subnode.value) != 1:
+                raise ConstructorError("while constructing pairs", node.start_mark,
+                        "expected a single mapping item, but found %d items" % len(subnode.value),
+                        subnode.start_mark)
+            key_node, value_node = subnode.value[0]
+            key = self.construct_object(key_node)
+            value = self.construct_object(value_node)
+            pairs.append((key, value))
+
+    def construct_yaml_set(self, node):
+        data = set()
+        yield data
+        value = self.construct_mapping(node)
+        data.update(value)
+
+    def construct_yaml_str(self, node):
+        value = self.construct_scalar(node)
+        try:
+            return value.encode('ascii')
+        except UnicodeEncodeError:
+            return value
+
+    def construct_yaml_seq(self, node):
+        data = []
+        yield data
+        data.extend(self.construct_sequence(node))
+
+    def construct_yaml_map(self, node):
+        data = {}
+        yield data
+        value = self.construct_mapping(node)
+        data.update(value)
+
+    def construct_yaml_object(self, node, cls):
+        data = cls.__new__(cls)
+        yield data
+        if hasattr(data, '__setstate__'):
+            state = self.construct_mapping(node, deep=True)
+            data.__setstate__(state)
+        else:
+            state = self.construct_mapping(node)
+            data.__dict__.update(state)
+
+    def construct_undefined(self, node):
+        raise ConstructorError(None, None,
+                "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
+                node.start_mark)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:null',
+        SafeConstructor.construct_yaml_null)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:bool',
+        SafeConstructor.construct_yaml_bool)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:int',
+        SafeConstructor.construct_yaml_int)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:float',
+        SafeConstructor.construct_yaml_float)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:binary',
+        SafeConstructor.construct_yaml_binary)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:timestamp',
+        SafeConstructor.construct_yaml_timestamp)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:omap',
+        SafeConstructor.construct_yaml_omap)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:pairs',
+        SafeConstructor.construct_yaml_pairs)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:set',
+        SafeConstructor.construct_yaml_set)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:str',
+        SafeConstructor.construct_yaml_str)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:seq',
+        SafeConstructor.construct_yaml_seq)
+
+SafeConstructor.add_constructor(
+        u'tag:yaml.org,2002:map',
+        SafeConstructor.construct_yaml_map)
+
+SafeConstructor.add_constructor(None,
+        SafeConstructor.construct_undefined)
+
+class Constructor(SafeConstructor):
+
+    def construct_python_str(self, node):
+        return self.construct_scalar(node).encode('utf-8')
+
+    def construct_python_unicode(self, node):
+        return self.construct_scalar(node)
+
+    def construct_python_long(self, node):
+        return long(self.construct_yaml_int(node))
+
+    def construct_python_complex(self, node):
+        return complex(self.construct_scalar(node))
+
+    def construct_python_tuple(self, node):
+        return tuple(self.construct_sequence(node))
+
+    def find_python_module(self, name, mark):
+        if not name:
+            raise ConstructorError("while constructing a Python module", mark,
+                    "expected non-empty name appended to the tag", mark)
+        try:
+            __import__(name)
+        except ImportError, exc:
+            raise ConstructorError("while constructing a Python module", mark,
+                    "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
+        return sys.modules[name]
+
+    def find_python_name(self, name, mark):
+        if not name:
+            raise ConstructorError("while constructing a Python object", mark,
+                    "expected non-empty name appended to the tag", mark)
+        if u'.' in name:
+            # str.rsplit is Python 2.4+ only, so emulate it:
+            #module_name, object_name = name.rsplit('.', 1)
+            items = name.split('.')
+            object_name = items.pop()
+            module_name = '.'.join(items)
+        else:
+            module_name = '__builtin__'
+            object_name = name
+        try:
+            __import__(module_name)
+        except ImportError, exc:
+            raise ConstructorError("while constructing a Python object", mark,
+                    "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
+        module = sys.modules[module_name]
+        if not hasattr(module, object_name):
+            raise ConstructorError("while constructing a Python object", mark,
+                    "cannot find %r in the module %r" % (object_name.encode('utf-8'),
+                        module.__name__), mark)
+        return getattr(module, object_name)
+
+    def construct_python_name(self, suffix, node):
+        value = self.construct_scalar(node)
+        if value:
+            raise ConstructorError("while constructing a Python name", node.start_mark,
+                    "expected the empty value, but found %r" % value.encode('utf-8'),
+                    node.start_mark)
+        return self.find_python_name(suffix, node.start_mark)
+
+    def construct_python_module(self, suffix, node):
+        value = self.construct_scalar(node)
+        if value:
+            raise ConstructorError("while constructing a Python module", node.start_mark,
+                    "expected the empty value, but found %r" % value.encode('utf-8'),
+                    node.start_mark)
+        return self.find_python_module(suffix, node.start_mark)
+
+    class classobj: pass
+
+    def make_python_instance(self, suffix, node,
+            args=None, kwds=None, newobj=False):
+        if not args:
+            args = []
+        if not kwds:
+            kwds = {}
+        cls = self.find_python_name(suffix, node.start_mark)
+        if newobj and isinstance(cls, type(self.classobj))  \
+                and not args and not kwds:
+            instance = self.classobj()
+            instance.__class__ = cls
+            return instance
+        elif newobj and isinstance(cls, type):
+            return cls.__new__(cls, *args, **kwds)
+        else:
+            return cls(*args, **kwds)
+
+    def set_python_instance_state(self, instance, state):
+        if hasattr(instance, '__setstate__'):
+            instance.__setstate__(state)
+        else:
+            slotstate = {}
+            if isinstance(state, tuple) and len(state) == 2:
+                state, slotstate = state
+            if hasattr(instance, '__dict__'):
+                instance.__dict__.update(state)
+            elif state:
+                slotstate.update(state)
+            for key, value in slotstate.items():
+                setattr(instance, key, value)
+
+    def construct_python_object(self, suffix, node):
+        # Format:
+        #   !!python/object:module.name { ... state ... }
+        instance = self.make_python_instance(suffix, node, newobj=True)
+        yield instance
+        deep = hasattr(instance, '__setstate__')
+        state = self.construct_mapping(node, deep=deep)
+        self.set_python_instance_state(instance, state)
+
+    def construct_python_object_apply(self, suffix, node, newobj=False):
+        # Format:
+        #   !!python/object/apply       # (or !!python/object/new)
+        #   args: [ ... arguments ... ]
+        #   kwds: { ... keywords ... }
+        #   state: ... state ...
+        #   listitems: [ ... listitems ... ]
+        #   dictitems: { ... dictitems ... }
+        # or short format:
+        #   !!python/object/apply [ ... arguments ... ]
+        # The difference between !!python/object/apply and !!python/object/new
+        # is how an object is created, check make_python_instance for details.
+        if isinstance(node, SequenceNode):
+            args = self.construct_sequence(node, deep=True)
+            kwds = {}
+            state = {}
+            listitems = []
+            dictitems = {}
+        else:
+            value = self.construct_mapping(node, deep=True)
+            args = value.get('args', [])
+            kwds = value.get('kwds', {})
+            state = value.get('state', {})
+            listitems = value.get('listitems', [])
+            dictitems = value.get('dictitems', {})
+        instance = self.make_python_instance(suffix, node, args, kwds, newobj)
+        if state:
+            self.set_python_instance_state(instance, state)
+        if listitems:
+            instance.extend(listitems)
+        if dictitems:
+            for key in dictitems:
+                instance[key] = dictitems[key]
+        return instance
+
+    def construct_python_object_new(self, suffix, node):
+        return self.construct_python_object_apply(suffix, node, newobj=True)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/none',
+    Constructor.construct_yaml_null)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/bool',
+    Constructor.construct_yaml_bool)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/str',
+    Constructor.construct_python_str)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/unicode',
+    Constructor.construct_python_unicode)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/int',
+    Constructor.construct_yaml_int)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/long',
+    Constructor.construct_python_long)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/float',
+    Constructor.construct_yaml_float)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/complex',
+    Constructor.construct_python_complex)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/list',
+    Constructor.construct_yaml_seq)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/tuple',
+    Constructor.construct_python_tuple)
+
+Constructor.add_constructor(
+    u'tag:yaml.org,2002:python/dict',
+    Constructor.construct_yaml_map)
+
+Constructor.add_multi_constructor(
+    u'tag:yaml.org,2002:python/name:',
+    Constructor.construct_python_name)
+
+Constructor.add_multi_constructor(
+    u'tag:yaml.org,2002:python/module:',
+    Constructor.construct_python_module)
+
+Constructor.add_multi_constructor(
+    u'tag:yaml.org,2002:python/object:',
+    Constructor.construct_python_object)
+
+Constructor.add_multi_constructor(
+    u'tag:yaml.org,2002:python/object/apply:',
+    Constructor.construct_python_object_apply)
+
+Constructor.add_multi_constructor(
+    u'tag:yaml.org,2002:python/object/new:',
+    Constructor.construct_python_object_new)
+
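
A short sketch of how the constructor registry above is typically
extended; the !segment tag, field names, and values are invented for
illustration:

    import yaml

    def construct_segment(loader, node):
        # construct_mapping() turns the MappingNode into a plain dict.
        return loader.construct_mapping(node)

    yaml.add_constructor(u'!segment', construct_segment)

    seg = yaml.load("!segment {host: sdw1, port: 40000}")
    print seg['host']   # sdw1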

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/constructor.pyc
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/constructor.pyc b/tools/bin/ext/yaml/constructor.pyc
new file mode 100644
index 0000000..c127d54
Binary files /dev/null and b/tools/bin/ext/yaml/constructor.pyc differ

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/cyaml.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/cyaml.py b/tools/bin/ext/yaml/cyaml.py
new file mode 100644
index 0000000..14acb07
--- /dev/null
+++ b/tools/bin/ext/yaml/cyaml.py
@@ -0,0 +1,85 @@
+
+__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader',
+        'CBaseDumper', 'CSafeDumper', 'CDumper']
+
+from _yaml import CParser, CEmitter
+
+from constructor import *
+
+from serializer import *
+from representer import *
+
+from resolver import *
+
+class CBaseLoader(CParser, BaseConstructor, BaseResolver):
+
+    def __init__(self, stream):
+        CParser.__init__(self, stream)
+        BaseConstructor.__init__(self)
+        BaseResolver.__init__(self)
+
+class CSafeLoader(CParser, SafeConstructor, Resolver):
+
+    def __init__(self, stream):
+        CParser.__init__(self, stream)
+        SafeConstructor.__init__(self)
+        Resolver.__init__(self)
+
+class CLoader(CParser, Constructor, Resolver):
+
+    def __init__(self, stream):
+        CParser.__init__(self, stream)
+        Constructor.__init__(self)
+        Resolver.__init__(self)
+
+class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
+
+    def __init__(self, stream,
+            default_style=None, default_flow_style=None,
+            canonical=None, indent=None, width=None,
+            allow_unicode=None, line_break=None,
+            encoding=None, explicit_start=None, explicit_end=None,
+            version=None, tags=None):
+        CEmitter.__init__(self, stream, canonical=canonical,
+                indent=indent, width=width,
+                allow_unicode=allow_unicode, line_break=line_break,
+                explicit_start=explicit_start, explicit_end=explicit_end,
+                version=version, tags=tags)
+        BaseRepresenter.__init__(self, default_style=default_style,
+                default_flow_style=default_flow_style)
+        BaseResolver.__init__(self)
+
+class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
+
+    def __init__(self, stream,
+            default_style=None, default_flow_style=None,
+            canonical=None, indent=None, width=None,
+            allow_unicode=None, line_break=None,
+            encoding=None, explicit_start=None, explicit_end=None,
+            version=None, tags=None):
+        CEmitter.__init__(self, stream, canonical=canonical,
+                indent=indent, width=width,
+                allow_unicode=allow_unicode, line_break=line_break,
+                explicit_start=explicit_start, explicit_end=explicit_end,
+                version=version, tags=tags)
+        SafeRepresenter.__init__(self, default_style=default_style,
+                default_flow_style=default_flow_style)
+        Resolver.__init__(self)
+
+class CDumper(CEmitter, Serializer, Representer, Resolver):
+
+    def __init__(self, stream,
+            default_style=None, default_flow_style=None,
+            canonical=None, indent=None, width=None,
+            allow_unicode=None, line_break=None,
+            encoding=None, explicit_start=None, explicit_end=None,
+            version=None, tags=None):
+        CEmitter.__init__(self, stream, canonical=canonical,
+                indent=indent, width=width,
+                allow_unicode=allow_unicode, line_break=line_break,
+                explicit_start=explicit_start, explicit_end=explicit_end,
+                version=version, tags=tags)
+        Representer.__init__(self, default_style=default_style,
+                default_flow_style=default_flow_style)
+        Resolver.__init__(self)
+
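
These C-backed classes are only importable when the _yaml extension
module (built against LibYAML) is present; the try/except around
"from cyaml import *" in __init__.py keeps the pure-Python classes as
the fallback. A hedged sketch of the usual selection pattern (the
config.yaml filename is illustrative):

    import yaml

    try:
        Loader = yaml.CLoader   # LibYAML-backed, considerably faster
    except AttributeError:
        Loader = yaml.Loader    # pure-Python fallback

    data = yaml.load(open('config.yaml'), Loader=Loader)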

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/cyaml.pyc
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/cyaml.pyc b/tools/bin/ext/yaml/cyaml.pyc
new file mode 100644
index 0000000..5829abe
Binary files /dev/null and b/tools/bin/ext/yaml/cyaml.pyc differ

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/dumper.py
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/dumper.py b/tools/bin/ext/yaml/dumper.py
new file mode 100644
index 0000000..355c1e2
--- /dev/null
+++ b/tools/bin/ext/yaml/dumper.py
@@ -0,0 +1,62 @@
+
+__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
+
+from emitter import *
+from serializer import *
+from representer import *
+from resolver import *
+
+class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
+
+    def __init__(self, stream,
+            default_style=None, default_flow_style=None,
+            canonical=None, indent=None, width=None,
+            allow_unicode=None, line_break=None,
+            encoding=None, explicit_start=None, explicit_end=None,
+            version=None, tags=None):
+        Emitter.__init__(self, stream, canonical=canonical,
+                indent=indent, width=width,
+                allow_unicode=allow_unicode, line_break=line_break)
+        Serializer.__init__(self, encoding=encoding,
+                explicit_start=explicit_start, explicit_end=explicit_end,
+                version=version, tags=tags)
+        BaseRepresenter.__init__(self, default_style=default_style,
+                default_flow_style=default_flow_style)
+        BaseResolver.__init__(self)
+
+class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
+
+    def __init__(self, stream,
+            default_style=None, default_flow_style=None,
+            canonical=None, indent=None, width=None,
+            allow_unicode=None, line_break=None,
+            encoding=None, explicit_start=None, explicit_end=None,
+            version=None, tags=None):
+        Emitter.__init__(self, stream, canonical=canonical,
+                indent=indent, width=width,
+                allow_unicode=allow_unicode, line_break=line_break)
+        Serializer.__init__(self, encoding=encoding,
+                explicit_start=explicit_start, explicit_end=explicit_end,
+                version=version, tags=tags)
+        SafeRepresenter.__init__(self, default_style=default_style,
+                default_flow_style=default_flow_style)
+        Resolver.__init__(self)
+
+class Dumper(Emitter, Serializer, Representer, Resolver):
+
+    def __init__(self, stream,
+            default_style=None, default_flow_style=None,
+            canonical=None, indent=None, width=None,
+            allow_unicode=None, line_break=None,
+            encoding=None, explicit_start=None, explicit_end=None,
+            version=None, tags=None):
+        Emitter.__init__(self, stream, canonical=canonical,
+                indent=indent, width=width,
+                allow_unicode=allow_unicode, line_break=line_break)
+        Serializer.__init__(self, encoding=encoding,
+                explicit_start=explicit_start, explicit_end=explicit_end,
+                version=version, tags=tags)
+        Representer.__init__(self, default_style=default_style,
+                default_flow_style=default_flow_style)
+        Resolver.__init__(self)
+
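
The Dumper classes above mostly wire the Emitter, Serializer,
Representer, and Resolver mixins together; every keyword accepted by
dump()/dump_all() is threaded into one of these constructors. A small
sketch with a few of the knobs (values chosen arbitrarily):

    import yaml

    text = yaml.dump({'a': [1, 2, 3]},
                     default_flow_style=False,  # block style collections
                     explicit_start=True,       # emit the leading '---'
                     indent=2, width=72)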

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a485be47/tools/bin/ext/yaml/dumper.pyc
----------------------------------------------------------------------
diff --git a/tools/bin/ext/yaml/dumper.pyc b/tools/bin/ext/yaml/dumper.pyc
new file mode 100644
index 0000000..611730d
Binary files /dev/null and b/tools/bin/ext/yaml/dumper.pyc differ