Posted to commits@mynewt.apache.org by cc...@apache.org on 2015/11/21 01:42:32 UTC

[07/42] incubator-mynewt-newt git commit: Move newt source into a "newt" subdirectory.

http://git-wip-us.apache.org/repos/asf/incubator-mynewt-newt/blob/e95057f4/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/LICENSE
----------------------------------------------------------------------
diff --git a/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/LICENSE b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/LICENSE
new file mode 100644
index 0000000..a68e67f
--- /dev/null
+++ b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/LICENSE
@@ -0,0 +1,188 @@
+
+Copyright (c) 2011-2014 - Canonical Inc.
+
+This software is licensed under the LGPLv3, included below.
+
+As a special exception to the GNU Lesser General Public License version 3
+("LGPL3"), the copyright holders of this Library give you permission to
+convey to a third party a Combined Work that links statically or dynamically
+to this Library without providing any Minimal Corresponding Source or
+Minimal Application Code as set out in 4d or providing the installation
+information set out in section 4e, provided that you comply with the other
+provisions of LGPL3 and provided that you meet, for the Application the
+terms and conditions of the license(s) which apply to the Application.
+
+Except as stated in this special exception, the provisions of LGPL3 will
+continue to comply in full to this Library. If you modify this Library, you
+may apply this exception to your version of this Library, but you are not
+obliged to do so. If you do not wish to do so, delete this exception
+statement from your version. This exception does not (and cannot) modify any
+license terms which apply to the Application, with which you must still
+comply.
+
+
+                   GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions.
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+  A "Combined Work" is a work produced by combining or linking an
+Application with the Library.  The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+  The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+  The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+  1. Exception to Section 3 of the GNU GPL.
+
+  You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+  2. Conveying Modified Versions.
+
+  If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+   a) under this License, provided that you make a good faith effort to
+   ensure that, in the event an Application does not supply the
+   function or data, the facility still operates, and performs
+   whatever part of its purpose remains meaningful, or
+
+   b) under the GNU GPL, with none of the additional permissions of
+   this License applicable to that copy.
+
+  3. Object Code Incorporating Material from Library Header Files.
+
+  The object code form of an Application may incorporate material from
+a header file that is part of the Library.  You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+   a) Give prominent notice with each copy of the object code that the
+   Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the object code with a copy of the GNU GPL and this license
+   document.
+
+  4. Combined Works.
+
+  You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+   a) Give prominent notice with each copy of the Combined Work that
+   the Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the Combined Work with a copy of the GNU GPL and this license
+   document.
+
+   c) For a Combined Work that displays copyright notices during
+   execution, include the copyright notice for the Library among
+   these notices, as well as a reference directing the user to the
+   copies of the GNU GPL and this license document.
+
+   d) Do one of the following:
+
+       0) Convey the Minimal Corresponding Source under the terms of this
+       License, and the Corresponding Application Code in a form
+       suitable for, and under terms that permit, the user to
+       recombine or relink the Application with a modified version of
+       the Linked Version to produce a modified Combined Work, in the
+       manner specified by section 6 of the GNU GPL for conveying
+       Corresponding Source.
+
+       1) Use a suitable shared library mechanism for linking with the
+       Library.  A suitable mechanism is one that (a) uses at run time
+       a copy of the Library already present on the user's computer
+       system, and (b) will operate properly with a modified version
+       of the Library that is interface-compatible with the Linked
+       Version.
+
+   e) Provide Installation Information, but only if you would otherwise
+   be required to provide such information under section 6 of the
+   GNU GPL, and only to the extent that such information is
+   necessary to install and execute a modified version of the
+   Combined Work produced by recombining or relinking the
+   Application with a modified version of the Linked Version. (If
+   you use option 4d0, the Installation Information must accompany
+   the Minimal Corresponding Source and Corresponding Application
+   Code. If you use option 4d1, you must provide the Installation
+   Information in the manner specified by section 6 of the GNU GPL
+   for conveying Corresponding Source.)
+
+  5. Combined Libraries.
+
+  You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+   a) Accompany the combined library with a copy of the same work based
+   on the Library, uncombined with any other library facilities,
+   conveyed under the terms of this License.
+
+   b) Give prominent notice with the combined library that part of it
+   is a work based on the Library, and explaining where to find the
+   accompanying uncombined form of the same work.
+
+  6. Revised Versions of the GNU Lesser General Public License.
+
+  The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+  If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.

http://git-wip-us.apache.org/repos/asf/incubator-mynewt-newt/blob/e95057f4/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/LICENSE.libyaml
----------------------------------------------------------------------
diff --git a/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/LICENSE.libyaml b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/LICENSE.libyaml
new file mode 100644
index 0000000..8da58fb
--- /dev/null
+++ b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/LICENSE.libyaml
@@ -0,0 +1,31 @@
+The following files were ported to Go from C files of libyaml, and thus
+are still covered by their original copyright and license:
+
+    apic.go
+    emitterc.go
+    parserc.go
+    readerc.go
+    scannerc.go
+    writerc.go
+    yamlh.go
+    yamlprivateh.go
+
+Copyright (c) 2006 Kirill Simonov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

http://git-wip-us.apache.org/repos/asf/incubator-mynewt-newt/blob/e95057f4/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/README.md
----------------------------------------------------------------------
diff --git a/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/README.md b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/README.md
new file mode 100644
index 0000000..d6c919e
--- /dev/null
+++ b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/README.md
@@ -0,0 +1,128 @@
+# YAML support for the Go language
+
+Introduction
+------------
+
+The yaml package enables Go programs to comfortably encode and decode YAML
+values. It was developed within [Canonical](https://www.canonical.com) as
+part of the [juju](https://juju.ubuntu.com) project, and is based on a
+pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
+C library to parse and generate YAML data quickly and reliably.
+
+Compatibility
+-------------
+
+The yaml package supports most of YAML 1.1 and 1.2, including support for
+anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
+implemented, and base-60 floats from YAML 1.1 are purposefully not
+supported since they're a poor design and are gone in YAML 1.2.
+
+Installation and usage
+----------------------
+
+The import path for the package is *gopkg.in/yaml.v2*.
+
+To install it, run:
+
+    go get gopkg.in/yaml.v2
+
+API documentation
+-----------------
+
+If opened in a browser, the import path itself leads to the API documentation:
+
+  * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)
+
+API stability
+-------------
+
+The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).
+
+
+License
+-------
+
+The yaml package is licensed under the LGPL with an exception that allows it to be linked statically. Please see the LICENSE file for details.
+
+
+Example
+-------
+
+```Go
+package main
+
+import (
+        "fmt"
+        "log"
+
+        "gopkg.in/yaml.v2"
+)
+
+var data = `
+a: Easy!
+b:
+  c: 2
+  d: [3, 4]
+`
+
+type T struct {
+        A string
+        B struct{C int; D []int ",flow"}
+}
+
+func main() {
+        t := T{}
+    
+        err := yaml.Unmarshal([]byte(data), &t)
+        if err != nil {
+                log.Fatalf("error: %v", err)
+        }
+        fmt.Printf("--- t:\n%v\n\n", t)
+    
+        d, err := yaml.Marshal(&t)
+        if err != nil {
+                log.Fatalf("error: %v", err)
+        }
+        fmt.Printf("--- t dump:\n%s\n\n", string(d))
+    
+        m := make(map[interface{}]interface{})
+    
+        err = yaml.Unmarshal([]byte(data), &m)
+        if err != nil {
+                log.Fatalf("error: %v", err)
+        }
+        fmt.Printf("--- m:\n%v\n\n", m)
+    
+        d, err = yaml.Marshal(&m)
+        if err != nil {
+                log.Fatalf("error: %v", err)
+        }
+        fmt.Printf("--- m dump:\n%s\n\n", string(d))
+}
+```
+
+This example will generate the following output:
+
+```
+--- t:
+{Easy! {2 [3 4]}}
+
+--- t dump:
+a: Easy!
+b:
+  c: 2
+  d: [3, 4]
+
+
+--- m:
+map[a:Easy! b:map[c:2 d:[3 4]]]
+
+--- m dump:
+a: Easy!
+b:
+  c: 2
+  d:
+  - 3
+  - 4
+```
+
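
For reference, a minimal sketch of the anchor and merge-key handling that the Compatibility section above mentions; it is not part of the diff. The document and its keys (defaults, development, adapter, host, database) are invented for illustration, while yaml.Unmarshal and the "<<" merge key come from the package itself.

```Go
package main

import (
        "fmt"
        "log"

        "gopkg.in/yaml.v2"
)

// Invented example document: "development" pulls in the anchored
// "defaults" mapping via the "<<" merge key, then adds its own key.
var doc = `
defaults: &defaults
  adapter: postgres
  host: localhost

development:
  <<: *defaults
  database: dev_db
`

func main() {
        cfg := map[string]map[string]string{}
        if err := yaml.Unmarshal([]byte(doc), &cfg); err != nil {
                log.Fatalf("error: %v", err)
        }
        // Typically prints: map[adapter:postgres database:dev_db host:localhost]
        fmt.Println(cfg["development"])
}
```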

http://git-wip-us.apache.org/repos/asf/incubator-mynewt-newt/blob/e95057f4/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/apic.go
----------------------------------------------------------------------
diff --git a/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/apic.go b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/apic.go
new file mode 100644
index 0000000..95ec014
--- /dev/null
+++ b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/apic.go
@@ -0,0 +1,742 @@
+package yaml
+
+import (
+	"io"
+	"os"
+)
+
+func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
+	//fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens))
+
+	// Check if we can move the queue at the beginning of the buffer.
+	if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) {
+		if parser.tokens_head != len(parser.tokens) {
+			copy(parser.tokens, parser.tokens[parser.tokens_head:])
+		}
+		parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head]
+		parser.tokens_head = 0
+	}
+	parser.tokens = append(parser.tokens, *token)
+	if pos < 0 {
+		return
+	}
+	copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:])
+	parser.tokens[parser.tokens_head+pos] = *token
+}
+
+// Create a new parser object.
+func yaml_parser_initialize(parser *yaml_parser_t) bool {
+	*parser = yaml_parser_t{
+		raw_buffer: make([]byte, 0, input_raw_buffer_size),
+		buffer:     make([]byte, 0, input_buffer_size),
+	}
+	return true
+}
+
+// Destroy a parser object.
+func yaml_parser_delete(parser *yaml_parser_t) {
+	*parser = yaml_parser_t{}
+}
+
+// String read handler.
+func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
+	if parser.input_pos == len(parser.input) {
+		return 0, io.EOF
+	}
+	n = copy(buffer, parser.input[parser.input_pos:])
+	parser.input_pos += n
+	return n, nil
+}
+
+// File read handler.
+func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
+	return parser.input_file.Read(buffer)
+}
+
+// Set a string input.
+func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
+	if parser.read_handler != nil {
+		panic("must set the input source only once")
+	}
+	parser.read_handler = yaml_string_read_handler
+	parser.input = input
+	parser.input_pos = 0
+}
+
+// Set a file input.
+func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) {
+	if parser.read_handler != nil {
+		panic("must set the input source only once")
+	}
+	parser.read_handler = yaml_file_read_handler
+	parser.input_file = file
+}
+
+// Set the source encoding.
+func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
+	if parser.encoding != yaml_ANY_ENCODING {
+		panic("must set the encoding only once")
+	}
+	parser.encoding = encoding
+}
+
+// Create a new emitter object.
+func yaml_emitter_initialize(emitter *yaml_emitter_t) bool {
+	*emitter = yaml_emitter_t{
+		buffer:     make([]byte, output_buffer_size),
+		raw_buffer: make([]byte, 0, output_raw_buffer_size),
+		states:     make([]yaml_emitter_state_t, 0, initial_stack_size),
+		events:     make([]yaml_event_t, 0, initial_queue_size),
+	}
+	return true
+}
+
+// Destroy an emitter object.
+func yaml_emitter_delete(emitter *yaml_emitter_t) {
+	*emitter = yaml_emitter_t{}
+}
+
+// String write handler.
+func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
+	*emitter.output_buffer = append(*emitter.output_buffer, buffer...)
+	return nil
+}
+
+// File write handler.
+func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
+	_, err := emitter.output_file.Write(buffer)
+	return err
+}
+
+// Set a string output.
+func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) {
+	if emitter.write_handler != nil {
+		panic("must set the output target only once")
+	}
+	emitter.write_handler = yaml_string_write_handler
+	emitter.output_buffer = output_buffer
+}
+
+// Set a file output.
+func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) {
+	if emitter.write_handler != nil {
+		panic("must set the output target only once")
+	}
+	emitter.write_handler = yaml_file_write_handler
+	emitter.output_file = file
+}
+
+// Set the output encoding.
+func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) {
+	if emitter.encoding != yaml_ANY_ENCODING {
+		panic("must set the output encoding only once")
+	}
+	emitter.encoding = encoding
+}
+
+// Set the canonical output style.
+func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
+	emitter.canonical = canonical
+}
+
+// Set the indentation increment.
+func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
+	if indent < 2 || indent > 9 {
+		indent = 2
+	}
+	emitter.best_indent = indent
+}
+
+// Set the preferred line width.
+func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) {
+	if width < 0 {
+		width = -1
+	}
+	emitter.best_width = width
+}
+
+// Set if unescaped non-ASCII characters are allowed.
+func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) {
+	emitter.unicode = unicode
+}
+
+// Set the preferred line break character.
+func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
+	emitter.line_break = line_break
+}
+
+///*
+// * Destroy a token object.
+// */
+//
+//YAML_DECLARE(void)
+//yaml_token_delete(yaml_token_t *token)
+//{
+//    assert(token);  // Non-NULL token object expected.
+//
+//    switch (token.type)
+//    {
+//        case YAML_TAG_DIRECTIVE_TOKEN:
+//            yaml_free(token.data.tag_directive.handle);
+//            yaml_free(token.data.tag_directive.prefix);
+//            break;
+//
+//        case YAML_ALIAS_TOKEN:
+//            yaml_free(token.data.alias.value);
+//            break;
+//
+//        case YAML_ANCHOR_TOKEN:
+//            yaml_free(token.data.anchor.value);
+//            break;
+//
+//        case YAML_TAG_TOKEN:
+//            yaml_free(token.data.tag.handle);
+//            yaml_free(token.data.tag.suffix);
+//            break;
+//
+//        case YAML_SCALAR_TOKEN:
+//            yaml_free(token.data.scalar.value);
+//            break;
+//
+//        default:
+//            break;
+//    }
+//
+//    memset(token, 0, sizeof(yaml_token_t));
+//}
+//
+///*
+// * Check if a string is a valid UTF-8 sequence.
+// *
+// * Check 'reader.c' for more details on UTF-8 encoding.
+// */
+//
+//static int
+//yaml_check_utf8(yaml_char_t *start, size_t length)
+//{
+//    yaml_char_t *end = start+length;
+//    yaml_char_t *pointer = start;
+//
+//    while (pointer < end) {
+//        unsigned char octet;
+//        unsigned int width;
+//        unsigned int value;
+//        size_t k;
+//
+//        octet = pointer[0];
+//        width = (octet & 0x80) == 0x00 ? 1 :
+//                (octet & 0xE0) == 0xC0 ? 2 :
+//                (octet & 0xF0) == 0xE0 ? 3 :
+//                (octet & 0xF8) == 0xF0 ? 4 : 0;
+//        value = (octet & 0x80) == 0x00 ? octet & 0x7F :
+//                (octet & 0xE0) == 0xC0 ? octet & 0x1F :
+//                (octet & 0xF0) == 0xE0 ? octet & 0x0F :
+//                (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0;
+//        if (!width) return 0;
+//        if (pointer+width > end) return 0;
+//        for (k = 1; k < width; k ++) {
+//            octet = pointer[k];
+//            if ((octet & 0xC0) != 0x80) return 0;
+//            value = (value << 6) + (octet & 0x3F);
+//        }
+//        if (!((width == 1) ||
+//            (width == 2 && value >= 0x80) ||
+//            (width == 3 && value >= 0x800) ||
+//            (width == 4 && value >= 0x10000))) return 0;
+//
+//        pointer += width;
+//    }
+//
+//    return 1;
+//}
+//
+
+// Create STREAM-START.
+func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool {
+	*event = yaml_event_t{
+		typ:      yaml_STREAM_START_EVENT,
+		encoding: encoding,
+	}
+	return true
+}
+
+// Create STREAM-END.
+func yaml_stream_end_event_initialize(event *yaml_event_t) bool {
+	*event = yaml_event_t{
+		typ: yaml_STREAM_END_EVENT,
+	}
+	return true
+}
+
+// Create DOCUMENT-START.
+func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t,
+	tag_directives []yaml_tag_directive_t, implicit bool) bool {
+	*event = yaml_event_t{
+		typ:               yaml_DOCUMENT_START_EVENT,
+		version_directive: version_directive,
+		tag_directives:    tag_directives,
+		implicit:          implicit,
+	}
+	return true
+}
+
+// Create DOCUMENT-END.
+func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool {
+	*event = yaml_event_t{
+		typ:      yaml_DOCUMENT_END_EVENT,
+		implicit: implicit,
+	}
+	return true
+}
+
+///*
+// * Create ALIAS.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
+//{
+//    mark yaml_mark_t = { 0, 0, 0 }
+//    anchor_copy *yaml_char_t = NULL
+//
+//    assert(event) // Non-NULL event object is expected.
+//    assert(anchor) // Non-NULL anchor is expected.
+//
+//    if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
+//
+//    anchor_copy = yaml_strdup(anchor)
+//    if (!anchor_copy)
+//        return 0
+//
+//    ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
+//
+//    return 1
+//}
+
+// Create SCALAR.
+func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
+	*event = yaml_event_t{
+		typ:             yaml_SCALAR_EVENT,
+		anchor:          anchor,
+		tag:             tag,
+		value:           value,
+		implicit:        plain_implicit,
+		quoted_implicit: quoted_implicit,
+		style:           yaml_style_t(style),
+	}
+	return true
+}
+
+// Create SEQUENCE-START.
+func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool {
+	*event = yaml_event_t{
+		typ:      yaml_SEQUENCE_START_EVENT,
+		anchor:   anchor,
+		tag:      tag,
+		implicit: implicit,
+		style:    yaml_style_t(style),
+	}
+	return true
+}
+
+// Create SEQUENCE-END.
+func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
+	*event = yaml_event_t{
+		typ: yaml_SEQUENCE_END_EVENT,
+	}
+	return true
+}
+
+// Create MAPPING-START.
+func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool {
+	*event = yaml_event_t{
+		typ:      yaml_MAPPING_START_EVENT,
+		anchor:   anchor,
+		tag:      tag,
+		implicit: implicit,
+		style:    yaml_style_t(style),
+	}
+	return true
+}
+
+// Create MAPPING-END.
+func yaml_mapping_end_event_initialize(event *yaml_event_t) bool {
+	*event = yaml_event_t{
+		typ: yaml_MAPPING_END_EVENT,
+	}
+	return true
+}
+
+// Destroy an event object.
+func yaml_event_delete(event *yaml_event_t) {
+	*event = yaml_event_t{}
+}
+
+///*
+// * Create a document object.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_initialize(document *yaml_document_t,
+//        version_directive *yaml_version_directive_t,
+//        tag_directives_start *yaml_tag_directive_t,
+//        tag_directives_end *yaml_tag_directive_t,
+//        start_implicit int, end_implicit int)
+//{
+//    struct {
+//        error yaml_error_type_t
+//    } context
+//    struct {
+//        start *yaml_node_t
+//        end *yaml_node_t
+//        top *yaml_node_t
+//    } nodes = { NULL, NULL, NULL }
+//    version_directive_copy *yaml_version_directive_t = NULL
+//    struct {
+//        start *yaml_tag_directive_t
+//        end *yaml_tag_directive_t
+//        top *yaml_tag_directive_t
+//    } tag_directives_copy = { NULL, NULL, NULL }
+//    value yaml_tag_directive_t = { NULL, NULL }
+//    mark yaml_mark_t = { 0, 0, 0 }
+//
+//    assert(document) // Non-NULL document object is expected.
+//    assert((tag_directives_start && tag_directives_end) ||
+//            (tag_directives_start == tag_directives_end))
+//                            // Valid tag directives are expected.
+//
+//    if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error
+//
+//    if (version_directive) {
+//        version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t))
+//        if (!version_directive_copy) goto error
+//        version_directive_copy.major = version_directive.major
+//        version_directive_copy.minor = version_directive.minor
+//    }
+//
+//    if (tag_directives_start != tag_directives_end) {
+//        tag_directive *yaml_tag_directive_t
+//        if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE))
+//            goto error
+//        for (tag_directive = tag_directives_start
+//                tag_directive != tag_directives_end; tag_directive ++) {
+//            assert(tag_directive.handle)
+//            assert(tag_directive.prefix)
+//            if (!yaml_check_utf8(tag_directive.handle,
+//                        strlen((char *)tag_directive.handle)))
+//                goto error
+//            if (!yaml_check_utf8(tag_directive.prefix,
+//                        strlen((char *)tag_directive.prefix)))
+//                goto error
+//            value.handle = yaml_strdup(tag_directive.handle)
+//            value.prefix = yaml_strdup(tag_directive.prefix)
+//            if (!value.handle || !value.prefix) goto error
+//            if (!PUSH(&context, tag_directives_copy, value))
+//                goto error
+//            value.handle = NULL
+//            value.prefix = NULL
+//        }
+//    }
+//
+//    DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy,
+//            tag_directives_copy.start, tag_directives_copy.top,
+//            start_implicit, end_implicit, mark, mark)
+//
+//    return 1
+//
+//error:
+//    STACK_DEL(&context, nodes)
+//    yaml_free(version_directive_copy)
+//    while (!STACK_EMPTY(&context, tag_directives_copy)) {
+//        value yaml_tag_directive_t = POP(&context, tag_directives_copy)
+//        yaml_free(value.handle)
+//        yaml_free(value.prefix)
+//    }
+//    STACK_DEL(&context, tag_directives_copy)
+//    yaml_free(value.handle)
+//    yaml_free(value.prefix)
+//
+//    return 0
+//}
+//
+///*
+// * Destroy a document object.
+// */
+//
+//YAML_DECLARE(void)
+//yaml_document_delete(document *yaml_document_t)
+//{
+//    struct {
+//        error yaml_error_type_t
+//    } context
+//    tag_directive *yaml_tag_directive_t
+//
+//    context.error = YAML_NO_ERROR // Eliminate a compiler warning.
+//
+//    assert(document) // Non-NULL document object is expected.
+//
+//    while (!STACK_EMPTY(&context, document.nodes)) {
+//        node yaml_node_t = POP(&context, document.nodes)
+//        yaml_free(node.tag)
+//        switch (node.type) {
+//            case YAML_SCALAR_NODE:
+//                yaml_free(node.data.scalar.value)
+//                break
+//            case YAML_SEQUENCE_NODE:
+//                STACK_DEL(&context, node.data.sequence.items)
+//                break
+//            case YAML_MAPPING_NODE:
+//                STACK_DEL(&context, node.data.mapping.pairs)
+//                break
+//            default:
+//                assert(0) // Should not happen.
+//        }
+//    }
+//    STACK_DEL(&context, document.nodes)
+//
+//    yaml_free(document.version_directive)
+//    for (tag_directive = document.tag_directives.start
+//            tag_directive != document.tag_directives.end
+//            tag_directive++) {
+//        yaml_free(tag_directive.handle)
+//        yaml_free(tag_directive.prefix)
+//    }
+//    yaml_free(document.tag_directives.start)
+//
+//    memset(document, 0, sizeof(yaml_document_t))
+//}
+//
+///**
+// * Get a document node.
+// */
+//
+//YAML_DECLARE(yaml_node_t *)
+//yaml_document_get_node(document *yaml_document_t, index int)
+//{
+//    assert(document) // Non-NULL document object is expected.
+//
+//    if (index > 0 && document.nodes.start + index <= document.nodes.top) {
+//        return document.nodes.start + index - 1
+//    }
+//    return NULL
+//}
+//
+///**
+// * Get the root object.
+// */
+//
+//YAML_DECLARE(yaml_node_t *)
+//yaml_document_get_root_node(document *yaml_document_t)
+//{
+//    assert(document) // Non-NULL document object is expected.
+//
+//    if (document.nodes.top != document.nodes.start) {
+//        return document.nodes.start
+//    }
+//    return NULL
+//}
+//
+///*
+// * Add a scalar node to a document.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_add_scalar(document *yaml_document_t,
+//        tag *yaml_char_t, value *yaml_char_t, length int,
+//        style yaml_scalar_style_t)
+//{
+//    struct {
+//        error yaml_error_type_t
+//    } context
+//    mark yaml_mark_t = { 0, 0, 0 }
+//    tag_copy *yaml_char_t = NULL
+//    value_copy *yaml_char_t = NULL
+//    node yaml_node_t
+//
+//    assert(document) // Non-NULL document object is expected.
+//    assert(value) // Non-NULL value is expected.
+//
+//    if (!tag) {
+//        tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG
+//    }
+//
+//    if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
+//    tag_copy = yaml_strdup(tag)
+//    if (!tag_copy) goto error
+//
+//    if (length < 0) {
+//        length = strlen((char *)value)
+//    }
+//
+//    if (!yaml_check_utf8(value, length)) goto error
+//    value_copy = yaml_malloc(length+1)
+//    if (!value_copy) goto error
+//    memcpy(value_copy, value, length)
+//    value_copy[length] = '\0'
+//
+//    SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark)
+//    if (!PUSH(&context, document.nodes, node)) goto error
+//
+//    return document.nodes.top - document.nodes.start
+//
+//error:
+//    yaml_free(tag_copy)
+//    yaml_free(value_copy)
+//
+//    return 0
+//}
+//
+///*
+// * Add a sequence node to a document.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_add_sequence(document *yaml_document_t,
+//        tag *yaml_char_t, style yaml_sequence_style_t)
+//{
+//    struct {
+//        error yaml_error_type_t
+//    } context
+//    mark yaml_mark_t = { 0, 0, 0 }
+//    tag_copy *yaml_char_t = NULL
+//    struct {
+//        start *yaml_node_item_t
+//        end *yaml_node_item_t
+//        top *yaml_node_item_t
+//    } items = { NULL, NULL, NULL }
+//    node yaml_node_t
+//
+//    assert(document) // Non-NULL document object is expected.
+//
+//    if (!tag) {
+//        tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG
+//    }
+//
+//    if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
+//    tag_copy = yaml_strdup(tag)
+//    if (!tag_copy) goto error
+//
+//    if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error
+//
+//    SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end,
+//            style, mark, mark)
+//    if (!PUSH(&context, document.nodes, node)) goto error
+//
+//    return document.nodes.top - document.nodes.start
+//
+//error:
+//    STACK_DEL(&context, items)
+//    yaml_free(tag_copy)
+//
+//    return 0
+//}
+//
+///*
+// * Add a mapping node to a document.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_add_mapping(document *yaml_document_t,
+//        tag *yaml_char_t, style yaml_mapping_style_t)
+//{
+//    struct {
+//        error yaml_error_type_t
+//    } context
+//    mark yaml_mark_t = { 0, 0, 0 }
+//    tag_copy *yaml_char_t = NULL
+//    struct {
+//        start *yaml_node_pair_t
+//        end *yaml_node_pair_t
+//        top *yaml_node_pair_t
+//    } pairs = { NULL, NULL, NULL }
+//    node yaml_node_t
+//
+//    assert(document) // Non-NULL document object is expected.
+//
+//    if (!tag) {
+//        tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG
+//    }
+//
+//    if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
+//    tag_copy = yaml_strdup(tag)
+//    if (!tag_copy) goto error
+//
+//    if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error
+//
+//    MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end,
+//            style, mark, mark)
+//    if (!PUSH(&context, document.nodes, node)) goto error
+//
+//    return document.nodes.top - document.nodes.start
+//
+//error:
+//    STACK_DEL(&context, pairs)
+//    yaml_free(tag_copy)
+//
+//    return 0
+//}
+//
+///*
+// * Append an item to a sequence node.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_append_sequence_item(document *yaml_document_t,
+//        sequence int, item int)
+//{
+//    struct {
+//        error yaml_error_type_t
+//    } context
+//
+//    assert(document) // Non-NULL document is required.
+//    assert(sequence > 0
+//            && document.nodes.start + sequence <= document.nodes.top)
+//                            // Valid sequence id is required.
+//    assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE)
+//                            // A sequence node is required.
+//    assert(item > 0 && document.nodes.start + item <= document.nodes.top)
+//                            // Valid item id is required.
+//
+//    if (!PUSH(&context,
+//                document.nodes.start[sequence-1].data.sequence.items, item))
+//        return 0
+//
+//    return 1
+//}
+//
+///*
+// * Append a pair of a key and a value to a mapping node.
+// */
+//
+//YAML_DECLARE(int)
+//yaml_document_append_mapping_pair(document *yaml_document_t,
+//        mapping int, key int, value int)
+//{
+//    struct {
+//        error yaml_error_type_t
+//    } context
+//
+//    pair yaml_node_pair_t
+//
+//    assert(document) // Non-NULL document is required.
+//    assert(mapping > 0
+//            && document.nodes.start + mapping <= document.nodes.top)
+//                            // Valid mapping id is required.
+//    assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE)
+//                            // A mapping node is required.
+//    assert(key > 0 && document.nodes.start + key <= document.nodes.top)
+//                            // Valid key id is required.
+//    assert(value > 0 && document.nodes.start + value <= document.nodes.top)
+//                            // Valid value id is required.
+//
+//    pair.key = key
+//    pair.value = value
+//
+//    if (!PUSH(&context,
+//                document.nodes.start[mapping-1].data.mapping.pairs, pair))
+//        return 0
+//
+//    return 1
+//}
+//
+//
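
For orientation, a package-internal sketch of how the low-level helpers in apic.go are typically driven, following the same initialize / set-input / parse-loop pattern that newParser in decode.go uses; it is not part of the diff. The function name dumpEventTypes is invented, and yaml_parser_parse is defined in parserc.go elsewhere in this vendored package.

```Go
package yaml

import "fmt"

// dumpEventTypes walks the event stream for src and prints each event's
// numeric type. It assumes it is compiled as part of this yaml package.
func dumpEventTypes(src []byte) {
	var p yaml_parser_t
	if !yaml_parser_initialize(&p) {
		panic("failed to initialize YAML parser")
	}
	defer yaml_parser_delete(&p)

	yaml_parser_set_input_string(&p, src)

	for {
		var event yaml_event_t
		if !yaml_parser_parse(&p, &event) {
			// The parser records its error message in the problem field.
			panic(p.problem)
		}
		typ := event.typ
		yaml_event_delete(&event)
		fmt.Println("event type:", typ)
		if typ == yaml_STREAM_END_EVENT {
			return
		}
	}
}
```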

http://git-wip-us.apache.org/repos/asf/incubator-mynewt-newt/blob/e95057f4/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/decode.go
----------------------------------------------------------------------
diff --git a/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/decode.go b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/decode.go
new file mode 100644
index 0000000..085cddc
--- /dev/null
+++ b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/decode.go
@@ -0,0 +1,683 @@
+package yaml
+
+import (
+	"encoding"
+	"encoding/base64"
+	"fmt"
+	"math"
+	"reflect"
+	"strconv"
+	"time"
+)
+
+const (
+	documentNode = 1 << iota
+	mappingNode
+	sequenceNode
+	scalarNode
+	aliasNode
+)
+
+type node struct {
+	kind         int
+	line, column int
+	tag          string
+	value        string
+	implicit     bool
+	children     []*node
+	anchors      map[string]*node
+}
+
+// ----------------------------------------------------------------------------
+// Parser, produces a node tree out of a libyaml event stream.
+
+type parser struct {
+	parser yaml_parser_t
+	event  yaml_event_t
+	doc    *node
+}
+
+func newParser(b []byte) *parser {
+	p := parser{}
+	if !yaml_parser_initialize(&p.parser) {
+		panic("failed to initialize YAML parser")
+	}
+
+	if len(b) == 0 {
+		b = []byte{'\n'}
+	}
+
+	yaml_parser_set_input_string(&p.parser, b)
+
+	p.skip()
+	if p.event.typ != yaml_STREAM_START_EVENT {
+		panic("expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
+	}
+	p.skip()
+	return &p
+}
+
+func (p *parser) destroy() {
+	if p.event.typ != yaml_NO_EVENT {
+		yaml_event_delete(&p.event)
+	}
+	yaml_parser_delete(&p.parser)
+}
+
+func (p *parser) skip() {
+	if p.event.typ != yaml_NO_EVENT {
+		if p.event.typ == yaml_STREAM_END_EVENT {
+			failf("attempted to go past the end of stream; corrupted value?")
+		}
+		yaml_event_delete(&p.event)
+	}
+	if !yaml_parser_parse(&p.parser, &p.event) {
+		p.fail()
+	}
+}
+
+func (p *parser) fail() {
+	var where string
+	var line int
+	if p.parser.problem_mark.line != 0 {
+		line = p.parser.problem_mark.line
+	} else if p.parser.context_mark.line != 0 {
+		line = p.parser.context_mark.line
+	}
+	if line != 0 {
+		where = "line " + strconv.Itoa(line) + ": "
+	}
+	var msg string
+	if len(p.parser.problem) > 0 {
+		msg = p.parser.problem
+	} else {
+		msg = "unknown problem parsing YAML content"
+	}
+	failf("%s%s", where, msg)
+}
+
+func (p *parser) anchor(n *node, anchor []byte) {
+	if anchor != nil {
+		p.doc.anchors[string(anchor)] = n
+	}
+}
+
+func (p *parser) parse() *node {
+	switch p.event.typ {
+	case yaml_SCALAR_EVENT:
+		return p.scalar()
+	case yaml_ALIAS_EVENT:
+		return p.alias()
+	case yaml_MAPPING_START_EVENT:
+		return p.mapping()
+	case yaml_SEQUENCE_START_EVENT:
+		return p.sequence()
+	case yaml_DOCUMENT_START_EVENT:
+		return p.document()
+	case yaml_STREAM_END_EVENT:
+		// Happens when attempting to decode an empty buffer.
+		return nil
+	default:
+		panic("attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
+	}
+	panic("unreachable")
+}
+
+func (p *parser) node(kind int) *node {
+	return &node{
+		kind:   kind,
+		line:   p.event.start_mark.line,
+		column: p.event.start_mark.column,
+	}
+}
+
+func (p *parser) document() *node {
+	n := p.node(documentNode)
+	n.anchors = make(map[string]*node)
+	p.doc = n
+	p.skip()
+	n.children = append(n.children, p.parse())
+	if p.event.typ != yaml_DOCUMENT_END_EVENT {
+		panic("expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
+	}
+	p.skip()
+	return n
+}
+
+func (p *parser) alias() *node {
+	n := p.node(aliasNode)
+	n.value = string(p.event.anchor)
+	p.skip()
+	return n
+}
+
+func (p *parser) scalar() *node {
+	n := p.node(scalarNode)
+	n.value = string(p.event.value)
+	n.tag = string(p.event.tag)
+	n.implicit = p.event.implicit
+	p.anchor(n, p.event.anchor)
+	p.skip()
+	return n
+}
+
+func (p *parser) sequence() *node {
+	n := p.node(sequenceNode)
+	p.anchor(n, p.event.anchor)
+	p.skip()
+	for p.event.typ != yaml_SEQUENCE_END_EVENT {
+		n.children = append(n.children, p.parse())
+	}
+	p.skip()
+	return n
+}
+
+func (p *parser) mapping() *node {
+	n := p.node(mappingNode)
+	p.anchor(n, p.event.anchor)
+	p.skip()
+	for p.event.typ != yaml_MAPPING_END_EVENT {
+		n.children = append(n.children, p.parse(), p.parse())
+	}
+	p.skip()
+	return n
+}
+
+// ----------------------------------------------------------------------------
+// Decoder, unmarshals a node into a provided value.
+
+type decoder struct {
+	doc     *node
+	aliases map[string]bool
+	mapType reflect.Type
+	terrors []string
+}
+
+var (
+	mapItemType    = reflect.TypeOf(MapItem{})
+	durationType   = reflect.TypeOf(time.Duration(0))
+	defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
+	ifaceType      = defaultMapType.Elem()
+)
+
+func newDecoder() *decoder {
+	d := &decoder{mapType: defaultMapType}
+	d.aliases = make(map[string]bool)
+	return d
+}
+
+func (d *decoder) terror(n *node, tag string, out reflect.Value) {
+	if n.tag != "" {
+		tag = n.tag
+	}
+	value := n.value
+	if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG {
+		if len(value) > 10 {
+			value = " `" + value[:7] + "...`"
+		} else {
+			value = " `" + value + "`"
+		}
+	}
+	d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type()))
+}
+
+func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
+	terrlen := len(d.terrors)
+	err := u.UnmarshalYAML(func(v interface{}) (err error) {
+		defer handleErr(&err)
+		d.unmarshal(n, reflect.ValueOf(v))
+		if len(d.terrors) > terrlen {
+			issues := d.terrors[terrlen:]
+			d.terrors = d.terrors[:terrlen]
+			return &TypeError{issues}
+		}
+		return nil
+	})
+	if e, ok := err.(*TypeError); ok {
+		d.terrors = append(d.terrors, e.Errors...)
+		return false
+	}
+	if err != nil {
+		fail(err)
+	}
+	return true
+}
+
+// d.prepare initializes and dereferences pointers and calls UnmarshalYAML
+// if a value is found to implement it.
+// It returns the initialized and dereferenced out value, whether
+// unmarshalling was already done by UnmarshalYAML, and if so whether
+// its types unmarshalled appropriately.
+//
+// If n holds a null value, prepare returns before doing anything.
+func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
+	if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "") {
+		return out, false, false
+	}
+	again := true
+	for again {
+		again = false
+		if out.Kind() == reflect.Ptr {
+			if out.IsNil() {
+				out.Set(reflect.New(out.Type().Elem()))
+			}
+			out = out.Elem()
+			again = true
+		}
+		if out.CanAddr() {
+			if u, ok := out.Addr().Interface().(Unmarshaler); ok {
+				good = d.callUnmarshaler(n, u)
+				return out, true, good
+			}
+		}
+	}
+	return out, false, false
+}
+
+func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
+	switch n.kind {
+	case documentNode:
+		return d.document(n, out)
+	case aliasNode:
+		return d.alias(n, out)
+	}
+	out, unmarshaled, good := d.prepare(n, out)
+	if unmarshaled {
+		return good
+	}
+	switch n.kind {
+	case scalarNode:
+		good = d.scalar(n, out)
+	case mappingNode:
+		good = d.mapping(n, out)
+	case sequenceNode:
+		good = d.sequence(n, out)
+	default:
+		panic("internal error: unknown node kind: " + strconv.Itoa(n.kind))
+	}
+	return good
+}
+
+func (d *decoder) document(n *node, out reflect.Value) (good bool) {
+	if len(n.children) == 1 {
+		d.doc = n
+		d.unmarshal(n.children[0], out)
+		return true
+	}
+	return false
+}
+
+func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
+	an, ok := d.doc.anchors[n.value]
+	if !ok {
+		failf("unknown anchor '%s' referenced", n.value)
+	}
+	if d.aliases[n.value] {
+		failf("anchor '%s' value contains itself", n.value)
+	}
+	d.aliases[n.value] = true
+	good = d.unmarshal(an, out)
+	delete(d.aliases, n.value)
+	return good
+}
+
+var zeroValue reflect.Value
+
+func resetMap(out reflect.Value) {
+	for _, k := range out.MapKeys() {
+		out.SetMapIndex(k, zeroValue)
+	}
+}
+
+func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
+	var tag string
+	var resolved interface{}
+	if n.tag == "" && !n.implicit {
+		tag = yaml_STR_TAG
+		resolved = n.value
+	} else {
+		tag, resolved = resolve(n.tag, n.value)
+		if tag == yaml_BINARY_TAG {
+			data, err := base64.StdEncoding.DecodeString(resolved.(string))
+			if err != nil {
+				failf("!!binary value contains invalid base64 data")
+			}
+			resolved = string(data)
+		}
+	}
+	if resolved == nil {
+		if out.Kind() == reflect.Map && !out.CanAddr() {
+			resetMap(out)
+		} else {
+			out.Set(reflect.Zero(out.Type()))
+		}
+		return true
+	}
+	if s, ok := resolved.(string); ok && out.CanAddr() {
+		if u, ok := out.Addr().Interface().(encoding.TextUnmarshaler); ok {
+			err := u.UnmarshalText([]byte(s))
+			if err != nil {
+				fail(err)
+			}
+			return true
+		}
+	}
+	switch out.Kind() {
+	case reflect.String:
+		if tag == yaml_BINARY_TAG {
+			out.SetString(resolved.(string))
+			good = true
+		} else if resolved != nil {
+			out.SetString(n.value)
+			good = true
+		}
+	case reflect.Interface:
+		if resolved == nil {
+			out.Set(reflect.Zero(out.Type()))
+		} else {
+			out.Set(reflect.ValueOf(resolved))
+		}
+		good = true
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		switch resolved := resolved.(type) {
+		case int:
+			if !out.OverflowInt(int64(resolved)) {
+				out.SetInt(int64(resolved))
+				good = true
+			}
+		case int64:
+			if !out.OverflowInt(resolved) {
+				out.SetInt(resolved)
+				good = true
+			}
+		case uint64:
+			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
+				out.SetInt(int64(resolved))
+				good = true
+			}
+		case float64:
+			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
+				out.SetInt(int64(resolved))
+				good = true
+			}
+		case string:
+			if out.Type() == durationType {
+				d, err := time.ParseDuration(resolved)
+				if err == nil {
+					out.SetInt(int64(d))
+					good = true
+				}
+			}
+		}
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+		switch resolved := resolved.(type) {
+		case int:
+			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		case int64:
+			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		case uint64:
+			if !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		case float64:
+			if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
+				out.SetUint(uint64(resolved))
+				good = true
+			}
+		}
+	case reflect.Bool:
+		switch resolved := resolved.(type) {
+		case bool:
+			out.SetBool(resolved)
+			good = true
+		}
+	case reflect.Float32, reflect.Float64:
+		switch resolved := resolved.(type) {
+		case int:
+			out.SetFloat(float64(resolved))
+			good = true
+		case int64:
+			out.SetFloat(float64(resolved))
+			good = true
+		case uint64:
+			out.SetFloat(float64(resolved))
+			good = true
+		case float64:
+			out.SetFloat(resolved)
+			good = true
+		}
+	case reflect.Ptr:
+		if out.Type().Elem() == reflect.TypeOf(resolved) {
+			// TODO Does this make sense? When is out a Ptr except when decoding a nil value?
+			elem := reflect.New(out.Type().Elem())
+			elem.Elem().Set(reflect.ValueOf(resolved))
+			out.Set(elem)
+			good = true
+		}
+	}
+	if !good {
+		d.terror(n, tag, out)
+	}
+	return good
+}
+
+func settableValueOf(i interface{}) reflect.Value {
+	v := reflect.ValueOf(i)
+	sv := reflect.New(v.Type()).Elem()
+	sv.Set(v)
+	return sv
+}
+
+func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
+	l := len(n.children)
+
+	var iface reflect.Value
+	switch out.Kind() {
+	case reflect.Slice:
+		out.Set(reflect.MakeSlice(out.Type(), l, l))
+	case reflect.Interface:
+		// No type hints. Will have to use a generic sequence.
+		iface = out
+		out = settableValueOf(make([]interface{}, l))
+	default:
+		d.terror(n, yaml_SEQ_TAG, out)
+		return false
+	}
+	et := out.Type().Elem()
+
+	j := 0
+	for i := 0; i < l; i++ {
+		e := reflect.New(et).Elem()
+		if ok := d.unmarshal(n.children[i], e); ok {
+			out.Index(j).Set(e)
+			j++
+		}
+	}
+	out.Set(out.Slice(0, j))
+	if iface.IsValid() {
+		iface.Set(out)
+	}
+	return true
+}
+
+func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
+	switch out.Kind() {
+	case reflect.Struct:
+		return d.mappingStruct(n, out)
+	case reflect.Slice:
+		return d.mappingSlice(n, out)
+	case reflect.Map:
+		// okay
+	case reflect.Interface:
+		if d.mapType.Kind() == reflect.Map {
+			iface := out
+			out = reflect.MakeMap(d.mapType)
+			iface.Set(out)
+		} else {
+			slicev := reflect.New(d.mapType).Elem()
+			if !d.mappingSlice(n, slicev) {
+				return false
+			}
+			out.Set(slicev)
+			return true
+		}
+	default:
+		d.terror(n, yaml_MAP_TAG, out)
+		return false
+	}
+	outt := out.Type()
+	kt := outt.Key()
+	et := outt.Elem()
+
+	mapType := d.mapType
+	if outt.Key() == ifaceType && outt.Elem() == ifaceType {
+		d.mapType = outt
+	}
+
+	if out.IsNil() {
+		out.Set(reflect.MakeMap(outt))
+	}
+	l := len(n.children)
+	for i := 0; i < l; i += 2 {
+		if isMerge(n.children[i]) {
+			d.merge(n.children[i+1], out)
+			continue
+		}
+		k := reflect.New(kt).Elem()
+		if d.unmarshal(n.children[i], k) {
+			kkind := k.Kind()
+			if kkind == reflect.Interface {
+				kkind = k.Elem().Kind()
+			}
+			if kkind == reflect.Map || kkind == reflect.Slice {
+				failf("invalid map key: %#v", k.Interface())
+			}
+			e := reflect.New(et).Elem()
+			if d.unmarshal(n.children[i+1], e) {
+				out.SetMapIndex(k, e)
+			}
+		}
+	}
+	d.mapType = mapType
+	return true
+}
+
+func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
+	outt := out.Type()
+	if outt.Elem() != mapItemType {
+		d.terror(n, yaml_MAP_TAG, out)
+		return false
+	}
+
+	mapType := d.mapType
+	d.mapType = outt
+
+	var slice []MapItem
+	var l = len(n.children)
+	for i := 0; i < l; i += 2 {
+		if isMerge(n.children[i]) {
+			d.merge(n.children[i+1], out)
+			continue
+		}
+		item := MapItem{}
+		k := reflect.ValueOf(&item.Key).Elem()
+		if d.unmarshal(n.children[i], k) {
+			v := reflect.ValueOf(&item.Value).Elem()
+			if d.unmarshal(n.children[i+1], v) {
+				slice = append(slice, item)
+			}
+		}
+	}
+	out.Set(reflect.ValueOf(slice))
+	d.mapType = mapType
+	return true
+}
+
+func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
+	sinfo, err := getStructInfo(out.Type())
+	if err != nil {
+		panic(err)
+	}
+	name := settableValueOf("")
+	l := len(n.children)
+
+	var inlineMap reflect.Value
+	var elemType reflect.Type
+	if sinfo.InlineMap != -1 {
+		inlineMap = out.Field(sinfo.InlineMap)
+		inlineMap.Set(reflect.New(inlineMap.Type()).Elem())
+		elemType = inlineMap.Type().Elem()
+	}
+
+	for i := 0; i < l; i += 2 {
+		ni := n.children[i]
+		if isMerge(ni) {
+			d.merge(n.children[i+1], out)
+			continue
+		}
+		if !d.unmarshal(ni, name) {
+			continue
+		}
+		if info, ok := sinfo.FieldsMap[name.String()]; ok {
+			var field reflect.Value
+			if info.Inline == nil {
+				field = out.Field(info.Num)
+			} else {
+				field = out.FieldByIndex(info.Inline)
+			}
+			d.unmarshal(n.children[i+1], field)
+		} else if sinfo.InlineMap != -1 {
+			if inlineMap.IsNil() {
+				inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
+			}
+			value := reflect.New(elemType).Elem()
+			d.unmarshal(n.children[i+1], value)
+			inlineMap.SetMapIndex(name, value)
+		}
+	}
+	return true
+}
+
+func failWantMap() {
+	failf("map merge requires map or sequence of maps as the value")
+}
+
+func (d *decoder) merge(n *node, out reflect.Value) {
+	switch n.kind {
+	case mappingNode:
+		d.unmarshal(n, out)
+	case aliasNode:
+		an, ok := d.doc.anchors[n.value]
+		if ok && an.kind != mappingNode {
+			failWantMap()
+		}
+		d.unmarshal(n, out)
+	case sequenceNode:
+		// Step backwards as earlier nodes take precedence.
+		for i := len(n.children) - 1; i >= 0; i-- {
+			ni := n.children[i]
+			if ni.kind == aliasNode {
+				an, ok := d.doc.anchors[ni.value]
+				if ok && an.kind != mappingNode {
+					failWantMap()
+				}
+			} else if ni.kind != mappingNode {
+				failWantMap()
+			}
+			d.unmarshal(ni, out)
+		}
+	default:
+		failWantMap()
+	}
+}
+
+func isMerge(n *node) bool {
+	return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG)
+}
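
For reference, a minimal sketch of the public Unmarshaler hook that prepare and callUnmarshaler above drive; it is not part of the diff. The CSVList type and the tags field are invented for illustration.

```Go
package main

import (
	"fmt"
	"log"
	"strings"

	"gopkg.in/yaml.v2"
)

// CSVList is an invented type that splits a comma-separated scalar into a
// slice by implementing the yaml.Unmarshaler interface.
type CSVList []string

func (l *CSVList) UnmarshalYAML(unmarshal func(interface{}) error) error {
	var raw string
	if err := unmarshal(&raw); err != nil {
		return err
	}
	*l = strings.Split(raw, ",")
	return nil
}

func main() {
	var cfg struct {
		Tags CSVList
	}
	if err := yaml.Unmarshal([]byte("tags: a,b,c"), &cfg); err != nil {
		log.Fatalf("error: %v", err)
	}
	fmt.Println(cfg.Tags) // [a b c]
}
```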

http://git-wip-us.apache.org/repos/asf/incubator-mynewt-newt/blob/e95057f4/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/decode_test.go
----------------------------------------------------------------------
diff --git a/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/decode_test.go b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/decode_test.go
new file mode 100644
index 0000000..04fdd9e
--- /dev/null
+++ b/newt/Godeps/_workspace/src/gopkg.in/yaml.v2/decode_test.go
@@ -0,0 +1,966 @@
+package yaml_test
+
+import (
+	"errors"
+	. "gopkg.in/check.v1"
+	"gopkg.in/yaml.v2"
+	"math"
+	"net"
+	"reflect"
+	"strings"
+	"time"
+)
+
+var unmarshalIntTest = 123
+
+var unmarshalTests = []struct {
+	data  string
+	value interface{}
+}{
+	{
+		"",
+		&struct{}{},
+	}, {
+		"{}", &struct{}{},
+	}, {
+		"v: hi",
+		map[string]string{"v": "hi"},
+	}, {
+		"v: hi", map[string]interface{}{"v": "hi"},
+	}, {
+		"v: true",
+		map[string]string{"v": "true"},
+	}, {
+		"v: true",
+		map[string]interface{}{"v": true},
+	}, {
+		"v: 10",
+		map[string]interface{}{"v": 10},
+	}, {
+		"v: 0b10",
+		map[string]interface{}{"v": 2},
+	}, {
+		"v: 0xA",
+		map[string]interface{}{"v": 10},
+	}, {
+		"v: 4294967296",
+		map[string]int64{"v": 4294967296},
+	}, {
+		"v: 0.1",
+		map[string]interface{}{"v": 0.1},
+	}, {
+		"v: .1",
+		map[string]interface{}{"v": 0.1},
+	}, {
+		"v: .Inf",
+		map[string]interface{}{"v": math.Inf(+1)},
+	}, {
+		"v: -.Inf",
+		map[string]interface{}{"v": math.Inf(-1)},
+	}, {
+		"v: -10",
+		map[string]interface{}{"v": -10},
+	}, {
+		"v: -.1",
+		map[string]interface{}{"v": -0.1},
+	},
+
+	// Simple values.
+	{
+		"123",
+		&unmarshalIntTest,
+	},
+
+	// Floats from spec
+	{
+		"canonical: 6.8523e+5",
+		map[string]interface{}{"canonical": 6.8523e+5},
+	}, {
+		"expo: 685.230_15e+03",
+		map[string]interface{}{"expo": 685.23015e+03},
+	}, {
+		"fixed: 685_230.15",
+		map[string]interface{}{"fixed": 685230.15},
+	}, {
+		"neginf: -.inf",
+		map[string]interface{}{"neginf": math.Inf(-1)},
+	}, {
+		"fixed: 685_230.15",
+		map[string]float64{"fixed": 685230.15},
+	},
+	//{"sexa: 190:20:30.15", map[string]interface{}{"sexa": 0}}, // Unsupported
+	//{"notanum: .NaN", map[string]interface{}{"notanum": math.NaN()}}, // Equality of NaN fails.
+
+	// Bools from spec
+	{
+		"canonical: y",
+		map[string]interface{}{"canonical": true},
+	}, {
+		"answer: NO",
+		map[string]interface{}{"answer": false},
+	}, {
+		"logical: True",
+		map[string]interface{}{"logical": true},
+	}, {
+		"option: on",
+		map[string]interface{}{"option": true},
+	}, {
+		"option: on",
+		map[string]bool{"option": true},
+	},
+	// Ints from spec
+	{
+		"canonical: 685230",
+		map[string]interface{}{"canonical": 685230},
+	}, {
+		"decimal: +685_230",
+		map[string]interface{}{"decimal": 685230},
+	}, {
+		"octal: 02472256",
+		map[string]interface{}{"octal": 685230},
+	}, {
+		"hexa: 0x_0A_74_AE",
+		map[string]interface{}{"hexa": 685230},
+	}, {
+		"bin: 0b1010_0111_0100_1010_1110",
+		map[string]interface{}{"bin": 685230},
+	}, {
+		"bin: -0b101010",
+		map[string]interface{}{"bin": -42},
+	}, {
+		"decimal: +685_230",
+		map[string]int{"decimal": 685230},
+	},
+
+	//{"sexa: 190:20:30", map[string]interface{}{"sexa": 0}}, // Unsupported
+
+	// Nulls from spec
+	{
+		"empty:",
+		map[string]interface{}{"empty": nil},
+	}, {
+		"canonical: ~",
+		map[string]interface{}{"canonical": nil},
+	}, {
+		"english: null",
+		map[string]interface{}{"english": nil},
+	}, {
+		"~: null key",
+		map[interface{}]string{nil: "null key"},
+	}, {
+		"empty:",
+		map[string]*bool{"empty": nil},
+	},
+
+	// Flow sequence
+	{
+		"seq: [A,B]",
+		map[string]interface{}{"seq": []interface{}{"A", "B"}},
+	}, {
+		"seq: [A,B,C,]",
+		map[string][]string{"seq": []string{"A", "B", "C"}},
+	}, {
+		"seq: [A,1,C]",
+		map[string][]string{"seq": []string{"A", "1", "C"}},
+	}, {
+		"seq: [A,1,C]",
+		map[string][]int{"seq": []int{1}},
+	}, {
+		"seq: [A,1,C]",
+		map[string]interface{}{"seq": []interface{}{"A", 1, "C"}},
+	},
+	// Block sequence
+	{
+		"seq:\n - A\n - B",
+		map[string]interface{}{"seq": []interface{}{"A", "B"}},
+	}, {
+		"seq:\n - A\n - B\n - C",
+		map[string][]string{"seq": []string{"A", "B", "C"}},
+	}, {
+		"seq:\n - A\n - 1\n - C",
+		map[string][]string{"seq": []string{"A", "1", "C"}},
+	}, {
+		"seq:\n - A\n - 1\n - C",
+		map[string][]int{"seq": []int{1}},
+	}, {
+		"seq:\n - A\n - 1\n - C",
+		map[string]interface{}{"seq": []interface{}{"A", 1, "C"}},
+	},
+
+	// Literal block scalar
+	{
+		"scalar: | # Comment\n\n literal\n\n \ttext\n\n",
+		map[string]string{"scalar": "\nliteral\n\n\ttext\n"},
+	},
+
+	// Folded block scalar
+	{
+		"scalar: > # Comment\n\n folded\n line\n \n next\n line\n  * one\n  * two\n\n last\n line\n\n",
+		map[string]string{"scalar": "\nfolded line\nnext line\n * one\n * two\n\nlast line\n"},
+	},
+
+	// Map inside interface with no type hints.
+	{
+		"a: {b: c}",
+		map[interface{}]interface{}{"a": map[interface{}]interface{}{"b": "c"}},
+	},
+
+	// Structs and type conversions.
+	{
+		"hello: world",
+		&struct{ Hello string }{"world"},
+	}, {
+		"a: {b: c}",
+		&struct{ A struct{ B string } }{struct{ B string }{"c"}},
+	}, {
+		"a: {b: c}",
+		&struct{ A *struct{ B string } }{&struct{ B string }{"c"}},
+	}, {
+		"a: {b: c}",
+		&struct{ A map[string]string }{map[string]string{"b": "c"}},
+	}, {
+		"a: {b: c}",
+		&struct{ A *map[string]string }{&map[string]string{"b": "c"}},
+	}, {
+		"a:",
+		&struct{ A map[string]string }{},
+	}, {
+		"a: 1",
+		&struct{ A int }{1},
+	}, {
+		"a: 1",
+		&struct{ A float64 }{1},
+	}, {
+		"a: 1.0",
+		&struct{ A int }{1},
+	}, {
+		"a: 1.0",
+		&struct{ A uint }{1},
+	}, {
+		"a: [1, 2]",
+		&struct{ A []int }{[]int{1, 2}},
+	}, {
+		"a: 1",
+		&struct{ B int }{0},
+	}, {
+		"a: 1",
+		&struct {
+			B int "a"
+		}{1},
+	}, {
+		"a: y",
+		&struct{ A bool }{true},
+	},
+
+	// Some cross type conversions
+	{
+		"v: 42",
+		map[string]uint{"v": 42},
+	}, {
+		"v: -42",
+		map[string]uint{},
+	}, {
+		"v: 4294967296",
+		map[string]uint64{"v": 4294967296},
+	}, {
+		"v: -4294967296",
+		map[string]uint64{},
+	},
+
+	// int
+	{
+		"int_max: 2147483647",
+		map[string]int{"int_max": math.MaxInt32},
+	},
+	{
+		"int_min: -2147483648",
+		map[string]int{"int_min": math.MinInt32},
+	},
+	{
+		"int_overflow: 9223372036854775808", // math.MaxInt64 + 1
+		map[string]int{},
+	},
+
+	// int64
+	{
+		"int64_max: 9223372036854775807",
+		map[string]int64{"int64_max": math.MaxInt64},
+	},
+	{
+		"int64_max_base2: 0b111111111111111111111111111111111111111111111111111111111111111",
+		map[string]int64{"int64_max_base2": math.MaxInt64},
+	},
+	{
+		"int64_min: -9223372036854775808",
+		map[string]int64{"int64_min": math.MinInt64},
+	},
+	{
+		"int64_neg_base2: -0b111111111111111111111111111111111111111111111111111111111111111",
+		map[string]int64{"int64_neg_base2": -math.MaxInt64},
+	},
+	{
+		"int64_overflow: 9223372036854775808", // math.MaxInt64 + 1
+		map[string]int64{},
+	},
+
+	// uint
+	{
+		"uint_min: 0",
+		map[string]uint{"uint_min": 0},
+	},
+	{
+		"uint_max: 4294967295",
+		map[string]uint{"uint_max": math.MaxUint32},
+	},
+	{
+		"uint_underflow: -1",
+		map[string]uint{},
+	},
+
+	// uint64
+	{
+		"uint64_min: 0",
+		map[string]uint{"uint64_min": 0},
+	},
+	{
+		"uint64_max: 18446744073709551615",
+		map[string]uint64{"uint64_max": math.MaxUint64},
+	},
+	{
+		"uint64_max_base2: 0b1111111111111111111111111111111111111111111111111111111111111111",
+		map[string]uint64{"uint64_max_base2": math.MaxUint64},
+	},
+	{
+		"uint64_maxint64: 9223372036854775807",
+		map[string]uint64{"uint64_maxint64": math.MaxInt64},
+	},
+	{
+		"uint64_underflow: -1",
+		map[string]uint64{},
+	},
+
+	// float32
+	{
+		"float32_max: 3.40282346638528859811704183484516925440e+38",
+		map[string]float32{"float32_max": math.MaxFloat32},
+	},
+	{
+		"float32_nonzero: 1.401298464324817070923729583289916131280e-45",
+		map[string]float32{"float32_nonzero": math.SmallestNonzeroFloat32},
+	},
+	{
+		"float32_maxuint64: 18446744073709551615",
+		map[string]float32{"float32_maxuint64": float32(math.MaxUint64)},
+	},
+	{
+		"float32_maxuint64+1: 18446744073709551616",
+		map[string]float32{"float32_maxuint64+1": float32(math.MaxUint64 + 1)},
+	},
+
+	// float64
+	{
+		"float64_max: 1.797693134862315708145274237317043567981e+308",
+		map[string]float64{"float64_max": math.MaxFloat64},
+	},
+	{
+		"float64_nonzero: 4.940656458412465441765687928682213723651e-324",
+		map[string]float64{"float64_nonzero": math.SmallestNonzeroFloat64},
+	},
+	{
+		"float64_maxuint64: 18446744073709551615",
+		map[string]float64{"float64_maxuint64": float64(math.MaxUint64)},
+	},
+	{
+		"float64_maxuint64+1: 18446744073709551616",
+		map[string]float64{"float64_maxuint64+1": float64(math.MaxUint64 + 1)},
+	},
+
+	// Overflow cases.
+	{
+		"v: 4294967297",
+		map[string]int32{},
+	}, {
+		"v: 128",
+		map[string]int8{},
+	},
+
+	// Quoted values.
+	{
+		"'1': '\"2\"'",
+		map[interface{}]interface{}{"1": "\"2\""},
+	}, {
+		"v:\n- A\n- 'B\n\n  C'\n",
+		map[string][]string{"v": []string{"A", "B\nC"}},
+	},
+
+	// Explicit tags.
+	{
+		"v: !!float '1.1'",
+		map[string]interface{}{"v": 1.1},
+	}, {
+		"v: !!null ''",
+		map[string]interface{}{"v": nil},
+	}, {
+		"%TAG !y! tag:yaml.org,2002:\n---\nv: !y!int '1'",
+		map[string]interface{}{"v": 1},
+	},
+
+	// Anchors and aliases.
+	{
+		"a: &x 1\nb: &y 2\nc: *x\nd: *y\n",
+		&struct{ A, B, C, D int }{1, 2, 1, 2},
+	}, {
+		"a: &a {c: 1}\nb: *a",
+		&struct {
+			A, B struct {
+				C int
+			}
+		}{struct{ C int }{1}, struct{ C int }{1}},
+	}, {
+		"a: &a [1, 2]\nb: *a",
+		&struct{ B []int }{[]int{1, 2}},
+	}, {
+		"b: *a\na: &a {c: 1}",
+		&struct {
+			A, B struct {
+				C int
+			}
+		}{struct{ C int }{1}, struct{ C int }{1}},
+	},
+
+	// Bug #1133337
+	{
+		"foo: ''",
+		map[string]*string{"foo": new(string)},
+	}, {
+		"foo: null",
+		map[string]string{"foo": ""},
+	}, {
+		"foo: null",
+		map[string]interface{}{"foo": nil},
+	},
+
+	// Ignored field
+	{
+		"a: 1\nb: 2\n",
+		&struct {
+			A int
+			B int "-"
+		}{1, 0},
+	},
+
+	// Bug #1191981
+	{
+		"" +
+			"%YAML 1.1\n" +
+			"--- !!str\n" +
+			`"Generic line break (no glyph)\n\` + "\n" +
+			` Generic line break (glyphed)\n\` + "\n" +
+			` Line separator\u2028\` + "\n" +
+			` Paragraph separator\u2029"` + "\n",
+		"" +
+			"Generic line break (no glyph)\n" +
+			"Generic line break (glyphed)\n" +
+			"Line separator\u2028Paragraph separator\u2029",
+	},
+
+	// Struct inlining
+	{
+		"a: 1\nb: 2\nc: 3\n",
+		&struct {
+			A int
+			C inlineB `yaml:",inline"`
+		}{1, inlineB{2, inlineC{3}}},
+	},
+
+	// Map inlining
+	{
+		"a: 1\nb: 2\nc: 3\n",
+		&struct {
+			A int
+			C map[string]int `yaml:",inline"`
+		}{1, map[string]int{"b": 2, "c": 3}},
+	},
+
+	// bug 1243827
+	{
+		"a: -b_c",
+		map[string]interface{}{"a": "-b_c"},
+	},
+	{
+		"a: +b_c",
+		map[string]interface{}{"a": "+b_c"},
+	},
+	{
+		"a: 50cent_of_dollar",
+		map[string]interface{}{"a": "50cent_of_dollar"},
+	},
+
+	// Duration
+	{
+		"a: 3s",
+		map[string]time.Duration{"a": 3 * time.Second},
+	},
+
+	// Issue #24.
+	{
+		"a: <foo>",
+		map[string]string{"a": "<foo>"},
+	},
+
+	// Base 60 floats are obsolete and unsupported.
+	{
+		"a: 1:1\n",
+		map[string]string{"a": "1:1"},
+	},
+
+	// Binary data.
+	{
+		"a: !!binary gIGC\n",
+		map[string]string{"a": "\x80\x81\x82"},
+	}, {
+		"a: !!binary |\n  " + strings.Repeat("kJCQ", 17) + "kJ\n  CQ\n",
+		map[string]string{"a": strings.Repeat("\x90", 54)},
+	}, {
+		"a: !!binary |\n  " + strings.Repeat("A", 70) + "\n  ==\n",
+		map[string]string{"a": strings.Repeat("\x00", 52)},
+	},
+
+	// Ordered maps.
+	{
+		"{b: 2, a: 1, d: 4, c: 3, sub: {e: 5}}",
+		&yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}},
+	},
+
+	// Issue #39.
+	{
+		"a:\n b:\n  c: d\n",
+		map[string]struct{ B interface{} }{"a": {map[interface{}]interface{}{"c": "d"}}},
+	},
+
+	// Custom map type.
+	{
+		"a: {b: c}",
+		M{"a": M{"b": "c"}},
+	},
+
+	// Support encoding.TextUnmarshaler.
+	{
+		"a: 1.2.3.4\n",
+		map[string]net.IP{"a": net.IPv4(1, 2, 3, 4)},
+	},
+	{
+		"a: 2015-02-24T18:19:39Z\n",
+		map[string]time.Time{"a": time.Unix(1424801979, 0)},
+	},
+
+	// Encode empty lists as zero-length slices.
+	{
+		"a: []",
+		&struct{ A []int }{[]int{}},
+	},
+}
+
+type M map[interface{}]interface{}
+
+type inlineB struct {
+	B       int
+	inlineC `yaml:",inline"`
+}
+
+type inlineC struct {
+	C int
+}
+
+func (s *S) TestUnmarshal(c *C) {
+	for _, item := range unmarshalTests {
+		t := reflect.ValueOf(item.value).Type()
+		var value interface{}
+		switch t.Kind() {
+		case reflect.Map:
+			value = reflect.MakeMap(t).Interface()
+		case reflect.String:
+			value = reflect.New(t).Interface()
+		case reflect.Ptr:
+			value = reflect.New(t.Elem()).Interface()
+		default:
+			c.Fatalf("missing case for %s", t)
+		}
+		err := yaml.Unmarshal([]byte(item.data), value)
+		if _, ok := err.(*yaml.TypeError); !ok {
+			c.Assert(err, IsNil)
+		}
+		if t.Kind() == reflect.String {
+			c.Assert(*value.(*string), Equals, item.value)
+		} else {
+			c.Assert(value, DeepEquals, item.value)
+		}
+	}
+}
+
+func (s *S) TestUnmarshalNaN(c *C) {
+	value := map[string]interface{}{}
+	err := yaml.Unmarshal([]byte("notanum: .NaN"), &value)
+	c.Assert(err, IsNil)
+	c.Assert(math.IsNaN(value["notanum"].(float64)), Equals, true)
+}
+
+var unmarshalErrorTests = []struct {
+	data, error string
+}{
+	{"v: !!float 'error'", "yaml: cannot decode !!str `error` as a !!float"},
+	{"v: [A,", "yaml: line 1: did not find expected node content"},
+	{"v:\n- [A,", "yaml: line 2: did not find expected node content"},
+	{"a: *b\n", "yaml: unknown anchor 'b' referenced"},
+	{"a: &a\n  b: *a\n", "yaml: anchor 'a' value contains itself"},
+	{"value: -", "yaml: block sequence entries are not allowed in this context"},
+	{"a: !!binary ==", "yaml: !!binary value contains invalid base64 data"},
+	{"{[.]}", `yaml: invalid map key: \[\]interface \{\}\{"\."\}`},
+	{"{{.}}", `yaml: invalid map key: map\[interface\ \{\}\]interface \{\}\{".":interface \{\}\(nil\)\}`},
+}
+
+func (s *S) TestUnmarshalErrors(c *C) {
+	for _, item := range unmarshalErrorTests {
+		var value interface{}
+		err := yaml.Unmarshal([]byte(item.data), &value)
+		c.Assert(err, ErrorMatches, item.error, Commentf("Partial unmarshal: %#v", value))
+	}
+}
+
+var unmarshalerTests = []struct {
+	data, tag string
+	value     interface{}
+}{
+	{"_: {hi: there}", "!!map", map[interface{}]interface{}{"hi": "there"}},
+	{"_: [1,A]", "!!seq", []interface{}{1, "A"}},
+	{"_: 10", "!!int", 10},
+	{"_: null", "!!null", nil},
+	{`_: BAR!`, "!!str", "BAR!"},
+	{`_: "BAR!"`, "!!str", "BAR!"},
+	{"_: !!foo 'BAR!'", "!!foo", "BAR!"},
+}
+
+var unmarshalerResult = map[int]error{}
+
+type unmarshalerType struct {
+	value interface{}
+}
+
+func (o *unmarshalerType) UnmarshalYAML(unmarshal func(v interface{}) error) error {
+	if err := unmarshal(&o.value); err != nil {
+		return err
+	}
+	if i, ok := o.value.(int); ok {
+		if result, ok := unmarshalerResult[i]; ok {
+			return result
+		}
+	}
+	return nil
+}
+
+type unmarshalerPointer struct {
+	Field *unmarshalerType "_"
+}
+
+type unmarshalerValue struct {
+	Field unmarshalerType "_"
+}
+
+func (s *S) TestUnmarshalerPointerField(c *C) {
+	for _, item := range unmarshalerTests {
+		obj := &unmarshalerPointer{}
+		err := yaml.Unmarshal([]byte(item.data), obj)
+		c.Assert(err, IsNil)
+		if item.value == nil {
+			c.Assert(obj.Field, IsNil)
+		} else {
+			c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value))
+			c.Assert(obj.Field.value, DeepEquals, item.value)
+		}
+	}
+}
+
+func (s *S) TestUnmarshalerValueField(c *C) {
+	for _, item := range unmarshalerTests {
+		obj := &unmarshalerValue{}
+		err := yaml.Unmarshal([]byte(item.data), obj)
+		c.Assert(err, IsNil)
+		c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value))
+		c.Assert(obj.Field.value, DeepEquals, item.value)
+	}
+}
+
+func (s *S) TestUnmarshalerWholeDocument(c *C) {
+	obj := &unmarshalerType{}
+	err := yaml.Unmarshal([]byte(unmarshalerTests[0].data), obj)
+	c.Assert(err, IsNil)
+	value, ok := obj.value.(map[interface{}]interface{})
+	c.Assert(ok, Equals, true, Commentf("value: %#v", obj.value))
+	c.Assert(value["_"], DeepEquals, unmarshalerTests[0].value)
+}
+
+func (s *S) TestUnmarshalerTypeError(c *C) {
+	unmarshalerResult[2] = &yaml.TypeError{[]string{"foo"}}
+	unmarshalerResult[4] = &yaml.TypeError{[]string{"bar"}}
+	defer func() {
+		delete(unmarshalerResult, 2)
+		delete(unmarshalerResult, 4)
+	}()
+
+	type T struct {
+		Before int
+		After  int
+		M      map[string]*unmarshalerType
+	}
+	var v T
+	data := `{before: A, m: {abc: 1, def: 2, ghi: 3, jkl: 4}, after: B}`
+	err := yaml.Unmarshal([]byte(data), &v)
+	c.Assert(err, ErrorMatches, ""+
+		"yaml: unmarshal errors:\n"+
+		"  line 1: cannot unmarshal !!str `A` into int\n"+
+		"  foo\n"+
+		"  bar\n"+
+		"  line 1: cannot unmarshal !!str `B` into int")
+	c.Assert(v.M["abc"], NotNil)
+	c.Assert(v.M["def"], IsNil)
+	c.Assert(v.M["ghi"], NotNil)
+	c.Assert(v.M["jkl"], IsNil)
+
+	c.Assert(v.M["abc"].value, Equals, 1)
+	c.Assert(v.M["ghi"].value, Equals, 3)
+}
+
+type proxyTypeError struct{}
+
+func (v *proxyTypeError) UnmarshalYAML(unmarshal func(interface{}) error) error {
+	var s string
+	var a int32
+	var b int64
+	if err := unmarshal(&s); err != nil {
+		panic(err)
+	}
+	if s == "a" {
+		if err := unmarshal(&b); err == nil {
+			panic("should have failed")
+		}
+		return unmarshal(&a)
+	}
+	if err := unmarshal(&a); err == nil {
+		panic("should have failed")
+	}
+	return unmarshal(&b)
+}
+
+func (s *S) TestUnmarshalerTypeErrorProxying(c *C) {
+	type T struct {
+		Before int
+		After  int
+		M      map[string]*proxyTypeError
+	}
+	var v T
+	data := `{before: A, m: {abc: a, def: b}, after: B}`
+	err := yaml.Unmarshal([]byte(data), &v)
+	c.Assert(err, ErrorMatches, ""+
+		"yaml: unmarshal errors:\n"+
+		"  line 1: cannot unmarshal !!str `A` into int\n"+
+		"  line 1: cannot unmarshal !!str `a` into int32\n"+
+		"  line 1: cannot unmarshal !!str `b` into int64\n"+
+		"  line 1: cannot unmarshal !!str `B` into int")
+}
+
+type failingUnmarshaler struct{}
+
+var failingErr = errors.New("failingErr")
+
+func (ft *failingUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error {
+	return failingErr
+}
+
+func (s *S) TestUnmarshalerError(c *C) {
+	err := yaml.Unmarshal([]byte("a: b"), &failingUnmarshaler{})
+	c.Assert(err, Equals, failingErr)
+}
+
+type sliceUnmarshaler []int
+
+func (su *sliceUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error {
+	var slice []int
+	err := unmarshal(&slice)
+	if err == nil {
+		*su = slice
+		return nil
+	}
+
+	var intVal int
+	err = unmarshal(&intVal)
+	if err == nil {
+		*su = []int{intVal}
+		return nil
+	}
+
+	return err
+}
+
+func (s *S) TestUnmarshalerRetry(c *C) {
+	var su sliceUnmarshaler
+	err := yaml.Unmarshal([]byte("[1, 2, 3]"), &su)
+	c.Assert(err, IsNil)
+	c.Assert(su, DeepEquals, sliceUnmarshaler([]int{1, 2, 3}))
+
+	err = yaml.Unmarshal([]byte("1"), &su)
+	c.Assert(err, IsNil)
+	c.Assert(su, DeepEquals, sliceUnmarshaler([]int{1}))
+}
+
+// From http://yaml.org/type/merge.html
+var mergeTests = `
+anchors:
+  list:
+    - &CENTER { "x": 1, "y": 2 }
+    - &LEFT   { "x": 0, "y": 2 }
+    - &BIG    { "r": 10 }
+    - &SMALL  { "r": 1 }
+
+# All the following maps are equal:
+
+plain:
+  # Explicit keys
+  "x": 1
+  "y": 2
+  "r": 10
+  label: center/big
+
+mergeOne:
+  # Merge one map
+  << : *CENTER
+  "r": 10
+  label: center/big
+
+mergeMultiple:
+  # Merge multiple maps
+  << : [ *CENTER, *BIG ]
+  label: center/big
+
+override:
+  # Override
+  << : [ *BIG, *LEFT, *SMALL ]
+  "x": 1
+  label: center/big
+
+shortTag:
+  # Explicit short merge tag
+  !!merge "<<" : [ *CENTER, *BIG ]
+  label: center/big
+
+longTag:
+  # Explicit merge long tag
+  !<tag:yaml.org,2002:merge> "<<" : [ *CENTER, *BIG ]
+  label: center/big
+
+inlineMap:
+  # Inlined map 
+  << : {"x": 1, "y": 2, "r": 10}
+  label: center/big
+
+inlineSequenceMap:
+  # Inlined map in sequence
+  << : [ *CENTER, {"r": 10} ]
+  label: center/big
+`
+
+func (s *S) TestMerge(c *C) {
+	var want = map[interface{}]interface{}{
+		"x":     1,
+		"y":     2,
+		"r":     10,
+		"label": "center/big",
+	}
+
+	var m map[interface{}]interface{}
+	err := yaml.Unmarshal([]byte(mergeTests), &m)
+	c.Assert(err, IsNil)
+	for name, test := range m {
+		if name == "anchors" {
+			continue
+		}
+		c.Assert(test, DeepEquals, want, Commentf("test %q failed", name))
+	}
+}
+
+func (s *S) TestMergeStruct(c *C) {
+	type Data struct {
+		X, Y, R int
+		Label   string
+	}
+	want := Data{1, 2, 10, "center/big"}
+
+	var m map[string]Data
+	err := yaml.Unmarshal([]byte(mergeTests), &m)
+	c.Assert(err, IsNil)
+	for name, test := range m {
+		if name == "anchors" {
+			continue
+		}
+		c.Assert(test, Equals, want, Commentf("test %q failed", name))
+	}
+}
+
+var unmarshalNullTests = []func() interface{}{
+	func() interface{} { var v interface{}; v = "v"; return &v },
+	func() interface{} { var s = "s"; return &s },
+	func() interface{} { var s = "s"; sptr := &s; return &sptr },
+	func() interface{} { var i = 1; return &i },
+	func() interface{} { var i = 1; iptr := &i; return &iptr },
+	func() interface{} { m := map[string]int{"s": 1}; return &m },
+	func() interface{} { m := map[string]int{"s": 1}; return m },
+}
+
+func (s *S) TestUnmarshalNull(c *C) {
+	for _, test := range unmarshalNullTests {
+		item := test()
+		zero := reflect.Zero(reflect.TypeOf(item).Elem()).Interface()
+		err := yaml.Unmarshal([]byte("null"), item)
+		c.Assert(err, IsNil)
+		if reflect.TypeOf(item).Kind() == reflect.Map {
+			c.Assert(reflect.ValueOf(item).Interface(), DeepEquals, reflect.MakeMap(reflect.TypeOf(item)).Interface())
+		} else {
+			c.Assert(reflect.ValueOf(item).Elem().Interface(), DeepEquals, zero)
+		}
+	}
+}
+
+func (s *S) TestUnmarshalSliceOnPreset(c *C) {
+	// Issue #48.
+	v := struct{ A []int }{[]int{1}}
+	yaml.Unmarshal([]byte("a: [2]"), &v)
+	c.Assert(v.A, DeepEquals, []int{2})
+}
+
+//var data []byte
+//func init() {
+//	var err error
+//	data, err = ioutil.ReadFile("/tmp/file.yaml")
+//	if err != nil {
+//		panic(err)
+//	}
+//}
+//
+//func (s *S) BenchmarkUnmarshal(c *C) {
+//	var err error
+//	for i := 0; i < c.N; i++ {
+//		var v map[string]interface{}
+//		err = yaml.Unmarshal(data, &v)
+//	}
+//	if err != nil {
+//		panic(err)
+//	}
+//}
+//
+//func (s *S) BenchmarkMarshal(c *C) {
+//	var v map[string]interface{}
+//	yaml.Unmarshal(data, &v)
+//	c.ResetTimer()
+//	for i := 0; i < c.N; i++ {
+//		yaml.Marshal(&v)
+//	}
+//}
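
A usage note on the "Ordered maps" entry in the test table above: decoding into yaml.MapSlice preserves the key order of the document, which a plain Go map does not. A minimal sketch of that behaviour, using an invented document:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// Illustrative document only; any mapping works.
	doc := []byte("b: 2\na: 1\nc: 3\n")

	var ms yaml.MapSlice
	if err := yaml.Unmarshal(doc, &ms); err != nil {
		panic(err)
	}
	for _, item := range ms {
		// Items come back in document order: b 2, a 1, c 3.
		fmt.Println(item.Key, item.Value)
	}
}

This is the same shape as the expected value yaml.MapSlice{{"b", 2}, {"a", 1}, ...} used in unmarshalTests above.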