You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@couchdb.apache.org by ga...@apache.org on 2015/10/14 12:09:43 UTC
[46/52] [partial] couchdb-nmo git commit: prepare for release
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic.json
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic.json b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic.json
new file mode 100644
index 0000000..950dff9
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic.json
@@ -0,0 +1,167 @@
+[
+ {
+ },
+ {
+ "image": [
+ {"shape": "rect", "fill": "#333", "stroke": "#999", "x": 0.5e+1, "y": 0.5, "z": 0.8e-0, "w": 0.5e5, "u": 2E10, "foo": 2E+1, "bar": 2E-0, "width": 47, "height": 47}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [2,6],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": true,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,47.5],[47.5,47.5],[47.5,0.5]]}
+ ],
+ "solid": {
+ "1": [2,4],
+ "2": [1],
+ "3": [2],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": false,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,0.5],[47.5,47.5],[0.5,47.5]]}
+ ],
+ "solid": {
+ "1": [2],
+ "2": [3],
+ "3": [2,6],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": true,"9": false}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,47.5],[47.5,0.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [2,6],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [3],
+ "7": [4,8],
+ "8": [7],
+ "9": [6,8]
+ },
+ "corners": {"1": false,"3": true,"7": true,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[0.5,47.5],[47.5,0.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [2,6],
+ "4": [1],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [9],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": false,"7": true,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,47.5],[0.5,23.5],[24.5,23.5],[24.5,0.5],[47.5,0.5],[47.5,47.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [6,2],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [9],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": false,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,0.5],[23.5,0.5],[23.5,24.5],[47.5,24.5],[47.5,47.5],[0.5,47.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [4,2],
+ "2": [],
+ "3": [2,6],
+ "4": [7],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": true,"9": false}
+ },
+ {
+ "image": [
+ {"shape": "circle", "fill": "#ff0", "stroke": "#ff8", "cx": 24, "cy": 24, "r": 18}
+ ],
+ "item": true
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#842", "stroke": "#f84", "points": [[4.5,0.5],[14.5,0.5],[14.5,17.5],[34,17.5],[33.5,0.5],[43.5,0.5],[43.5,47.5],[33.5,47.5],[33.5,30.5],[14.5,30.5],[14.5,47.5],[4.5,47.5]]}
+ ],
+ "jumpable": 3
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,0.5],[24,47.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [2,6],
+ "4": [1],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [3],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": false,"3": false,"7": true,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "rect", "fill": "#114acb", "x": 0.5, "y": 0.5, "width": 47, "height": 47},
+ {"shape": "polygon", "fill": "rgba(255,255,255,0.30)", "points": [[0.5,0.5],[47.5,0.5],[40,8],[8,8],[8,40],[0.5,47.5]]},
+ {"shape": "polygon", "fill": "rgba(0,0,0,0.30)", "points": [[47.5,0.5],[48,48],[0.5,47.5],[8,40],[40,40],[40,8]]},
+ {"shape": "polygon", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "points": [[24,9],[35,20],[26,29],[26,33],[22,33],[22,27],[29,20],[24,15],[16,23],[13,20]]},
+ {"shape": "rect", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "x": 22, "y":35, "width": 4, "height": 4}
+ ]
+ }
+]
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic2.json
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic2.json b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic2.json
new file mode 100644
index 0000000..3a6919b
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic2.json
@@ -0,0 +1,180 @@
+[
+ {
+ },
+ {
+ "image": [
+ {"shape": "rect", "fill": "#333", "stroke": "#999", "x": 0.5, "y": 0.5, "width": 47, "height": 47}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [2,6],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": true,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,47.5],[47.5,47.5],[47.5,0.5]]}
+ ],
+ "solid": {
+ "1": [2,4],
+ "2": [1],
+ "3": [2],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": false,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,0.5],[47.5,47.5],[0.5,47.5]]}
+ ],
+ "solid": {
+ "1": [2],
+ "2": [3],
+ "3": [2,6],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": true,"9": false}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,47.5],[47.5,0.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [2,6],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [3],
+ "7": [4,8],
+ "8": [7],
+ "9": [6,8]
+ },
+ "corners": {"1": false,"3": true,"7": true,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[0.5,47.5],[47.5,0.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [2,6],
+ "4": [1],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [9],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": false,"7": true,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,47.5],[0.5,23.5],[24.5,23.5],[24.5,0.5],[47.5,0.5],[47.5,47.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [6,2],
+ "4": [],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [9],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": false,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,0.5],[23.5,0.5],[23.5,24.5],[47.5,24.5],[47.5,47.5],[0.5,47.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [4,2],
+ "2": [],
+ "3": [2,6],
+ "4": [7],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": true,"3": true,"7": true,"9": false}
+ },
+ {
+ "image": [
+ {"shape": "circle", "fill": "#ff0", "stroke": "#ff8", "cx": 24, "cy": 24, "r": 18}
+ ],
+ "item": true
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#842", "stroke": "#f84", "points": [[4.5,0.5],[14.5,0.5],[14.5,17.5],[34,17.5],[33.5,0.5],[43.5,0.5],[43.5,47.5],[33.5,47.5],[33.5,30.5],[14.5,30.5],[14.5,47.5],[4.5,47.5]]}
+ ],
+ "jumpable": 3
+ },
+ {
+ "image": [
+ {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,0.5],[24,47.5]]}
+ ],
+ "jumpable": 3,
+ "solid": {
+ "1": [2,4],
+ "2": [],
+ "3": [2,6],
+ "4": [1],
+ "5": [2,8,1,3,7,9,4,6],
+ "6": [3],
+ "7": [4,8],
+ "8": [],
+ "9": [6,8]
+ },
+ "corners": {"1": false,"3": false,"7": true,"9": true}
+ },
+ {
+ "image": [
+ {"shape": "rect", "fill": "#114acb", "x": 0.5, "y": 0.5, "width": 47, "height": 47},
+ {"shape": "polygon", "fill": "rgba(255,255,255,0.30)", "points": [[0.5,0.5],[47.5,0.5],[40,8],[8,8],[8,40],[0.5,47.5]]},
+ {"shape": "polygon", "fill": "rgba(0,0,0,0.30)", "points": [[47.5,0.5],[48,48],[0.5,47.5],[8,40],[40,40],[40,8]]},
+ {"shape": "polygon", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "points": [[24,9],[35,20],[26,29],[26,33],[22,33],[22,27],[29,20],[24,15],[16,23],[13,20]]},
+ {"shape": "rect", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "x": 22, "y":35, "width": 4, "height": 4}
+ ],
+ "item": true
+ },
+ {
+ "image": [
+ {"shape": "circle", "fill": "#80f", "stroke": "#88f", "cx": 24, "cy": 24, "r": 18}
+ ],
+ "item": true
+ },
+ {
+ "image": [
+ {"shape": "circle", "fill": "#4f4", "stroke": "#8f8", "cx": 24, "cy": 24, "r": 18}
+ ],
+ "item": true
+ }
+]
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/boundary.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/boundary.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/boundary.js
new file mode 100644
index 0000000..6671f5f
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/boundary.js
@@ -0,0 +1,110 @@
+var test = require('tape');
+var Parser = require('../');
+
+test('2 byte utf8 \'De\' character: д', function (t) {
+ t.plan(1);
+
+ var p = new Parser();
+ p.onValue = function (value) {
+ t.equal(value, 'д');
+ };
+
+ var de_buffer = new Buffer([0xd0, 0xb4]);
+
+ p.write('"');
+ p.write(de_buffer);
+ p.write('"');
+
+});
+
+test('3 byte utf8 \'Han\' character: 我', function (t) {
+ t.plan(1);
+
+ var p = new Parser();
+ p.onValue = function (value) {
+ t.equal(value, '我');
+ };
+
+ var han_buffer = new Buffer([0xe6, 0x88, 0x91]);
+ p.write('"');
+ p.write(han_buffer);
+ p.write('"');
+});
+
+test('4 byte utf8 character (unicode scalar U+2070E): 𠜎', function (t) {
+ t.plan(1);
+
+ var p = new Parser();
+ p.onValue = function (value) {
+ t.equal(value, '𠜎');
+ };
+
+ var Ux2070E_buffer = new Buffer([0xf0, 0xa0, 0x9c, 0x8e]);
+ p.write('"');
+ p.write(Ux2070E_buffer);
+ p.write('"');
+});
+
+test('3 byte utf8 \'Han\' character chunked inbetween 2nd and 3rd byte: 我', function (t) {
+ t.plan(1);
+
+ var p = new Parser();
+ p.onValue = function (value) {
+ t.equal(value, '我');
+ };
+
+ var han_buffer_first = new Buffer([0xe6, 0x88]);
+ var han_buffer_second = new Buffer([0x91]);
+ p.write('"');
+ p.write(han_buffer_first);
+ p.write(han_buffer_second);
+ p.write('"');
+});
+
+test('4 byte utf8 character (unicode scalar U+2070E) chunked inbetween 2nd and 3rd byte: 𠜎', function (t) {
+ t.plan(1);
+
+ var p = new Parser();
+ p.onValue = function (value) {
+ t.equal(value, '𠜎');
+ };
+
+ var Ux2070E_buffer_first = new Buffer([0xf0, 0xa0]);
+ var Ux2070E_buffer_second = new Buffer([0x9c, 0x8e]);
+ p.write('"');
+ p.write(Ux2070E_buffer_first);
+ p.write(Ux2070E_buffer_second);
+ p.write('"');
+});
+
+test('1-4 byte utf8 character string chunked inbetween random bytes: Aж文𠜱B', function (t) {
+ t.plan(1);
+
+var p = new Parser();
+ p.onValue = function (value) {
+ t.equal(value, 'Aж文𠜱B');
+ };
+
+ var eclectic_buffer = new Buffer([0x41, // A
+ 0xd0, 0xb6, // ж
+ 0xe6, 0x96, 0x87, // 文
+ 0xf0, 0xa0, 0x9c, 0xb1, // 𠜱
+ 0x42]); // B
+
+ var rand_chunk = Math.floor(Math.random() * (eclectic_buffer.length));
+ var first_buffer = eclectic_buffer.slice(0, rand_chunk);
+ var second_buffer = eclectic_buffer.slice(rand_chunk);
+
+ //console.log('eclectic_buffer: ' + eclectic_buffer)
+ //console.log('sliced from 0 to ' + rand_chunk);
+ //console.log(first_buffer);
+ //console.log('then sliced from ' + rand_chunk + ' to the end');
+ //console.log(second_buffer);
+
+ console.log('chunked after offset ' + rand_chunk);
+ p.write('"');
+ p.write(first_buffer);
+ p.write(second_buffer);
+ p.write('"');
+
+});
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/offset.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/offset.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/offset.js
new file mode 100644
index 0000000..9a552ab
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/offset.js
@@ -0,0 +1,67 @@
+var test = require('tape');
+var Parser = require('../');
+
+var input = '{\n "string": "value",\n "number": 3,\n "object"';
+var input2 = ': {\n "key": "vд"\n },\n "array": [\n -1,\n 12\n ]\n ';
+var input3 = '"null": null, "true": true, "false": false, "frac": 3.14 }';
+
+var offsets = [
+ [ 0, Parser.C.LEFT_BRACE ],
+ [ 4, Parser.C.STRING ],
+ [ 12, Parser.C.COLON ],
+ [ 14, Parser.C.STRING ],
+ [ 21, Parser.C.COMMA ],
+ [ 25, Parser.C.STRING ],
+ [ 33, Parser.C.COLON ],
+ [ 35, Parser.C.NUMBER ],
+ [ 36, Parser.C.COMMA ],
+ [ 40, Parser.C.STRING ],
+ [ 48, Parser.C.COLON ],
+ [ 50, Parser.C.LEFT_BRACE ],
+ [ 54, Parser.C.STRING ],
+ [ 59, Parser.C.COLON ],
+ [ 61, Parser.C.STRING ],
+ [ 69, Parser.C.RIGHT_BRACE ],
+ [ 70, Parser.C.COMMA ],
+ [ 74, Parser.C.STRING ],
+ [ 81, Parser.C.COLON ],
+ [ 83, Parser.C.LEFT_BRACKET ],
+ [ 87, Parser.C.NUMBER ],
+ [ 89, Parser.C.COMMA ],
+ [ 93, Parser.C.NUMBER ],
+ [ 98, Parser.C.RIGHT_BRACKET ],
+ [ 102, Parser.C.STRING ],
+ [ 108, Parser.C.COLON ],
+ [ 110, Parser.C.NULL ],
+ [ 114, Parser.C.COMMA ],
+ [ 116, Parser.C.STRING ],
+ [ 122, Parser.C.COLON ],
+ [ 124, Parser.C.TRUE ],
+ [ 128, Parser.C.COMMA ],
+ [ 130, Parser.C.STRING ],
+ [ 137, Parser.C.COLON ],
+ [ 139, Parser.C.FALSE ],
+ [ 144, Parser.C.COMMA ],
+ [ 146, Parser.C.STRING ],
+ [ 152, Parser.C.COLON ],
+ [ 154, Parser.C.NUMBER ],
+ [ 159, Parser.C.RIGHT_BRACE ]
+];
+
+test('offset', function(t) {
+ t.plan(offsets.length * 2 + 1);
+
+ var p = new Parser();
+ var i = 0;
+ p.onToken = function (token) {
+ t.equal(p.offset, offsets[i][0]);
+ t.equal(token, offsets[i][1]);
+ i++;
+ };
+
+ p.write(input);
+ p.write(input2);
+ p.write(input3);
+
+ t.equal(i, offsets.length);
+});
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/primitives.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/primitives.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/primitives.js
new file mode 100644
index 0000000..33cae16
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/primitives.js
@@ -0,0 +1,57 @@
+var test = require('tape');
+var Parser = require('../');
+
+var expected = [
+ [ [], '' ],
+ [ [], 'Hello' ],
+ [ [], 'This"is' ],
+ [ [], '\r\n\f\t\\/"' ],
+ [ [], 'Λάμβδα' ],
+ [ [], '\\' ],
+ [ [], '/' ],
+ [ [], '"' ],
+ [ [ 0 ], 0 ],
+ [ [ 1 ], 1 ],
+ [ [ 2 ], -1 ],
+ [ [], [ 0, 1, -1 ] ],
+ [ [ 0 ], 1 ],
+ [ [ 1 ], 1.1 ],
+ [ [ 2 ], -1.1 ],
+ [ [ 3 ], -1 ],
+ [ [], [ 1, 1.1, -1.1, -1 ] ],
+ [ [ 0 ], -1 ],
+ [ [], [ -1 ] ],
+ [ [ 0 ], -0.1 ],
+ [ [], [ -0.1 ] ],
+ [ [ 0 ], 6.02e+23 ],
+ [ [], [ 6.02e+23 ] ],
+ [ [ 0 ], '7161093205057351174' ],
+ [ [], [ '7161093205057351174'] ]
+];
+
+test('primitives', function (t) {
+ t.plan(25);
+
+ var p = new Parser();
+ p.onValue = function (value) {
+ var keys = this.stack
+ .slice(1)
+ .map(function (item) { return item.key })
+ .concat(this.key !== undefined ? this.key : [])
+ ;
+ t.deepEqual(
+ [ keys, value ],
+ expected.shift()
+ );
+ };
+
+ p.write('"""Hello""This\\"is""\\r\\n\\f\\t\\\\\\/\\""');
+ p.write('"\\u039b\\u03ac\\u03bc\\u03b2\\u03b4\\u03b1"');
+ p.write('"\\\\"');
+ p.write('"\\/"');
+ p.write('"\\""');
+ p.write('[0,1,-1]');
+ p.write('[1.0,1.1,-1.1,-1.0][-1][-0.1]');
+ p.write('[6.02e23]');
+ p.write('[7161093205057351174]');
+});
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/unvalid.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/unvalid.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/unvalid.js
new file mode 100644
index 0000000..7715cc0
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/unvalid.js
@@ -0,0 +1,15 @@
+var test = require('tape');
+var Parser = require('../');
+
+test('unvalid', function (t) {
+ var count = 0;
+
+ var p = new Parser();
+ p.onError = function (value) {
+ count++;
+ t.equal(1, count);
+ t.end();
+ };
+
+ p.write('{"test": eer[');
+});
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/utf8.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/utf8.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/utf8.js
new file mode 100644
index 0000000..6cb842f
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/jsonparse/test/utf8.js
@@ -0,0 +1,38 @@
+var test = require('tape');
+var Parser = require('../');
+
+test('3 bytes of utf8', function (t) {
+ t.plan(1);
+
+ var p = new Parser();
+ p.onValue = function (value) {
+ t.equal(value, '├──');
+ };
+
+ p.write('"├──"');
+});
+
+test('utf8 snowman', function (t) {
+ t.plan(1);
+
+ var p = new Parser();
+ p.onValue = function (value) {
+ t.equal(value, '☃');
+ };
+
+ p.write('"☃"');
+});
+
+test('utf8 with regular ascii', function (t) {
+ t.plan(4);
+
+ var p = new Parser();
+ var expected = [ "snow: ☃!", "xyz", "¡que!" ];
+ expected.push(expected.slice());
+
+ p.onValue = function (value) {
+ t.deepEqual(value, expected.shift());
+ };
+
+ p.write('["snow: ☃!","xyz","¡que!"]');
+});
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/.travis.yml
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/.travis.yml b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/.travis.yml
new file mode 100644
index 0000000..c693a93
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/.travis.yml
@@ -0,0 +1,5 @@
+language: node_js
+node_js:
+ - 0.6
+ - 0.8
+ - "0.10"
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/LICENSE.APACHE2
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/LICENSE.APACHE2 b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/LICENSE.APACHE2
new file mode 100644
index 0000000..6366c04
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/LICENSE.APACHE2
@@ -0,0 +1,15 @@
+Apache License, Version 2.0
+
+Copyright (c) 2011 Dominic Tarr
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/LICENSE.MIT
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/LICENSE.MIT b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/LICENSE.MIT
new file mode 100644
index 0000000..6eafbd7
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/LICENSE.MIT
@@ -0,0 +1,24 @@
+The MIT License
+
+Copyright (c) 2011 Dominic Tarr
+
+Permission is hereby granted, free of charge,
+to any person obtaining a copy of this software and
+associated documentation files (the "Software"), to
+deal in the Software without restriction, including
+without limitation the rights to use, copy, modify,
+merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom
+the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/index.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/index.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/index.js
new file mode 100644
index 0000000..ca5fc59
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/index.js
@@ -0,0 +1,108 @@
+var Stream = require('stream')
+
+// through
+//
+// a stream that does nothing but re-emit the input.
+// useful for aggregating a series of changing but not ending streams into one stream.
+
+exports = module.exports = through
+through.through = through
+
+//create a readable writable stream.
+
+function through (write, end, opts) {
+ write = write || function (data) { this.queue(data) }
+ end = end || function () { this.queue(null) }
+
+ var ended = false, destroyed = false, buffer = [], _ended = false
+ var stream = new Stream()
+ stream.readable = stream.writable = true
+ stream.paused = false
+
+// stream.autoPause = !(opts && opts.autoPause === false)
+ stream.autoDestroy = !(opts && opts.autoDestroy === false)
+
+ stream.write = function (data) {
+ write.call(this, data)
+ return !stream.paused
+ }
+
+ function drain() {
+ while(buffer.length && !stream.paused) {
+ var data = buffer.shift()
+ if(null === data)
+ return stream.emit('end')
+ else
+ stream.emit('data', data)
+ }
+ }
+
+ stream.queue = stream.push = function (data) {
+// console.error(ended)
+ if(_ended) return stream
+ if(data === null) _ended = true
+ buffer.push(data)
+ drain()
+ return stream
+ }
+
+ //this will be registered as the first 'end' listener
+ //must call destroy next tick, to make sure we're after any
+ //stream piped from here.
+ //this is only a problem if end is not emitted synchronously.
+ //a nicer way to do this is to make sure this is the last listener for 'end'
+
+ stream.on('end', function () {
+ stream.readable = false
+ if(!stream.writable && stream.autoDestroy)
+ process.nextTick(function () {
+ stream.destroy()
+ })
+ })
+
+ function _end () {
+ stream.writable = false
+ end.call(stream)
+ if(!stream.readable && stream.autoDestroy)
+ stream.destroy()
+ }
+
+ stream.end = function (data) {
+ if(ended) return
+ ended = true
+ if(arguments.length) stream.write(data)
+ _end() // will emit or queue
+ return stream
+ }
+
+ stream.destroy = function () {
+ if(destroyed) return
+ destroyed = true
+ ended = true
+ buffer.length = 0
+ stream.writable = stream.readable = false
+ stream.emit('close')
+ return stream
+ }
+
+ stream.pause = function () {
+ if(stream.paused) return
+ stream.paused = true
+ return stream
+ }
+
+ stream.resume = function () {
+ if(stream.paused) {
+ stream.paused = false
+ stream.emit('resume')
+ }
+ drain()
+ //may have become paused again,
+ //as drain emits 'data'.
+ if(!stream.paused)
+ stream.emit('drain')
+ return stream
+ }
+ return stream
+}
+
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/package.json
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/package.json b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/package.json
new file mode 100644
index 0000000..85acfbf
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/package.json
@@ -0,0 +1,66 @@
+{
+ "name": "through",
+ "version": "2.3.8",
+ "description": "simplified stream construction",
+ "main": "index.js",
+ "scripts": {
+ "test": "set -e; for t in test/*.js; do node $t; done"
+ },
+ "devDependencies": {
+ "stream-spec": "~0.3.5",
+ "tape": "~2.3.2",
+ "from": "~0.1.3"
+ },
+ "keywords": [
+ "stream",
+ "streams",
+ "user-streams",
+ "pipe"
+ ],
+ "author": {
+ "name": "Dominic Tarr",
+ "email": "dominic.tarr@gmail.com",
+ "url": "dominictarr.com"
+ },
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/dominictarr/through.git"
+ },
+ "homepage": "https://github.com/dominictarr/through",
+ "testling": {
+ "browsers": [
+ "ie/8..latest",
+ "ff/15..latest",
+ "chrome/20..latest",
+ "safari/5.1..latest"
+ ],
+ "files": "test/*.js"
+ },
+ "gitHead": "2c5a6f9a0cc54da759b6e10964f2081c358e49dc",
+ "bugs": {
+ "url": "https://github.com/dominictarr/through/issues"
+ },
+ "_id": "through@2.3.8",
+ "_shasum": "0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5",
+ "_from": "through@>=2.2.7 <3.0.0",
+ "_npmVersion": "2.12.0",
+ "_nodeVersion": "2.3.1",
+ "_npmUser": {
+ "name": "dominictarr",
+ "email": "dominic.tarr@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "dominictarr",
+ "email": "dominic.tarr@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5",
+ "tarball": "http://registry.npmjs.org/through/-/through-2.3.8.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
+ "readme": "ERROR: No README data found!"
+}
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/readme.markdown
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/readme.markdown b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/readme.markdown
new file mode 100644
index 0000000..cb34c81
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/readme.markdown
@@ -0,0 +1,64 @@
+# through
+
+[![build status](https://secure.travis-ci.org/dominictarr/through.png)](http://travis-ci.org/dominictarr/through)
+[![testling badge](https://ci.testling.com/dominictarr/through.png)](https://ci.testling.com/dominictarr/through)
+
+Easy way to create a `Stream` that is both `readable` and `writable`.
+
+* Pass in optional `write` and `end` methods.
+* `through` takes care of pause/resume logic if you use `this.queue(data)` instead of `this.emit('data', data)`.
+* Use `this.pause()` and `this.resume()` to manage flow.
+* Check `this.paused` to see current flow state. (`write` always returns `!this.paused`).
+
+This function is the basis for most of the synchronous streams in
+[event-stream](http://github.com/dominictarr/event-stream).
+
+``` js
+var through = require('through')
+
+through(function write(data) {
+ this.queue(data) //data *must* not be null
+ },
+ function end () { //optional
+ this.queue(null)
+ })
+```
+
+Or, it can also be used _without_ buffering on pause: use `this.emit('data', data)`
+and `this.emit('end')`.
+
+``` js
+var through = require('through')
+
+through(function write(data) {
+ this.emit('data', data)
+ //this.pause()
+ },
+ function end () { //optional
+ this.emit('end')
+ })
+```
+
+## Extended Options
+
+You will probably not need these 99% of the time.
+
+### autoDestroy=false
+
+By default, `through` emits close when the writable
+and readable sides of the stream have ended.
+If that is not desired, set `autoDestroy=false`.
+
+``` js
+var through = require('through')
+
+//like this
+var ts = through(write, end, {autoDestroy: false})
+//or like this
+var ts = through(write, end)
+ts.autoDestroy = false
+```
+
+## License
+
+MIT / Apache2
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/async.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/async.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/async.js
new file mode 100644
index 0000000..46bdbae
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/async.js
@@ -0,0 +1,28 @@
+var from = require('from')
+var through = require('../')
+
+var tape = require('tape')
+
+tape('simple async example', function (t) {
+
+ var n = 0, expected = [1,2,3,4,5], actual = []
+ from(expected)
+ .pipe(through(function(data) {
+ this.pause()
+ n ++
+ setTimeout(function(){
+ console.log('pushing data', data)
+ this.push(data)
+ this.resume()
+ }.bind(this), 300)
+ })).pipe(through(function(data) {
+ console.log('pushing data second time', data);
+ this.push(data)
+ })).on('data', function (d) {
+ actual.push(d)
+ }).on('end', function() {
+ t.deepEqual(actual, expected)
+ t.end()
+ })
+
+})
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/auto-destroy.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/auto-destroy.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/auto-destroy.js
new file mode 100644
index 0000000..9a8fd00
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/auto-destroy.js
@@ -0,0 +1,30 @@
+var test = require('tape')
+var through = require('../')
+
+// must emit end before close.
+
+test('end before close', function (assert) {
+ var ts = through()
+ ts.autoDestroy = false
+ var ended = false, closed = false
+
+ ts.on('end', function () {
+ assert.ok(!closed)
+ ended = true
+ })
+ ts.on('close', function () {
+ assert.ok(ended)
+ closed = true
+ })
+
+ ts.write(1)
+ ts.write(2)
+ ts.write(3)
+ ts.end()
+ assert.ok(ended)
+ assert.notOk(closed)
+ ts.destroy()
+ assert.ok(closed)
+ assert.end()
+})
+
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/buffering.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/buffering.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/buffering.js
new file mode 100644
index 0000000..b0084bf
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/buffering.js
@@ -0,0 +1,71 @@
+var test = require('tape')
+var through = require('../')
+
+// must emit end before close.
+
+test('buffering', function(assert) {
+ var ts = through(function (data) {
+ this.queue(data)
+ }, function () {
+ this.queue(null)
+ })
+
+ var ended = false, actual = []
+
+ ts.on('data', actual.push.bind(actual))
+ ts.on('end', function () {
+ ended = true
+ })
+
+ ts.write(1)
+ ts.write(2)
+ ts.write(3)
+ assert.deepEqual(actual, [1, 2, 3])
+ ts.pause()
+ ts.write(4)
+ ts.write(5)
+ ts.write(6)
+ assert.deepEqual(actual, [1, 2, 3])
+ ts.resume()
+ assert.deepEqual(actual, [1, 2, 3, 4, 5, 6])
+ ts.pause()
+ ts.end()
+ assert.ok(!ended)
+ ts.resume()
+ assert.ok(ended)
+ assert.end()
+})
+
+test('buffering has data in queue, when ends', function (assert) {
+
+ /*
+ * If stream ends while paused with data in the queue,
+ * stream should still emit end after all data is written
+ * on resume.
+ */
+
+ var ts = through(function (data) {
+ this.queue(data)
+ }, function () {
+ this.queue(null)
+ })
+
+ var ended = false, actual = []
+
+ ts.on('data', actual.push.bind(actual))
+ ts.on('end', function () {
+ ended = true
+ })
+
+ ts.pause()
+ ts.write(1)
+ ts.write(2)
+ ts.write(3)
+ ts.end()
+ assert.deepEqual(actual, [], 'no data written yet, still paused')
+ assert.ok(!ended, 'end not emitted yet, still paused')
+ ts.resume()
+ assert.deepEqual(actual, [1, 2, 3], 'resumed, all data should be delivered')
+ assert.ok(ended, 'end should be emitted once all data was delivered')
+ assert.end();
+})
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/end.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/end.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/end.js
new file mode 100644
index 0000000..fa113f5
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/end.js
@@ -0,0 +1,45 @@
+var test = require('tape')
+var through = require('../')
+
+// must emit end before close.
+
+test('end before close', function (assert) {
+ var ts = through()
+ var ended = false, closed = false
+
+ ts.on('end', function () {
+ assert.ok(!closed)
+ ended = true
+ })
+ ts.on('close', function () {
+ assert.ok(ended)
+ closed = true
+ })
+
+ ts.write(1)
+ ts.write(2)
+ ts.write(3)
+ ts.end()
+ assert.ok(ended)
+ assert.ok(closed)
+ assert.end()
+})
+
+test('end only once', function (t) {
+
+ var ts = through()
+ var ended = false, closed = false
+
+ ts.on('end', function () {
+ t.equal(ended, false)
+ ended = true
+ })
+
+ ts.queue(null)
+ ts.queue(null)
+ ts.queue(null)
+
+ ts.resume()
+
+ t.end()
+})
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/index.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/index.js b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/index.js
new file mode 100644
index 0000000..96da82f
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/node_modules/through/test/index.js
@@ -0,0 +1,133 @@
+
+var test = require('tape')
+var spec = require('stream-spec')
+var through = require('../')
+
+/*
+ I'm using these two functions, and not streams and pipe
+ so there is less to break. if this test fails it must be
+ the implementation of _through_
+*/
+
+function write(array, stream) {
+ array = array.slice()
+ function next() {
+ while(array.length)
+ if(stream.write(array.shift()) === false)
+ return stream.once('drain', next)
+
+ stream.end()
+ }
+
+ next()
+}
+
+function read(stream, callback) {
+ var actual = []
+ stream.on('data', function (data) {
+ actual.push(data)
+ })
+ stream.once('end', function () {
+ callback(null, actual)
+ })
+ stream.once('error', function (err) {
+ callback(err)
+ })
+}
+
+test('simple defaults', function(assert) {
+
+ var l = 1000
+ , expected = []
+
+ while(l--) expected.push(l * Math.random())
+
+ var t = through()
+ var s = spec(t).through().pausable()
+
+ read(t, function (err, actual) {
+ assert.ifError(err)
+ assert.deepEqual(actual, expected)
+ assert.end()
+ })
+
+ t.on('close', s.validate)
+
+ write(expected, t)
+});
+
+test('simple functions', function(assert) {
+
+ var l = 1000
+ , expected = []
+
+ while(l--) expected.push(l * Math.random())
+
+ var t = through(function (data) {
+ this.emit('data', data*2)
+ })
+ var s = spec(t).through().pausable()
+
+
+ read(t, function (err, actual) {
+ assert.ifError(err)
+ assert.deepEqual(actual, expected.map(function (data) {
+ return data*2
+ }))
+ assert.end()
+ })
+
+ t.on('close', s.validate)
+
+ write(expected, t)
+})
+
+test('pauses', function(assert) {
+
+ var l = 1000
+ , expected = []
+
+ while(l--) expected.push(l) //Math.random())
+
+ var t = through()
+
+ var s = spec(t)
+ .through()
+ .pausable()
+
+ t.on('data', function () {
+ if(Math.random() > 0.1) return
+ t.pause()
+ process.nextTick(function () {
+ t.resume()
+ })
+ })
+
+ read(t, function (err, actual) {
+ assert.ifError(err)
+ assert.deepEqual(actual, expected)
+ })
+
+ t.on('close', function () {
+ s.validate()
+ assert.end()
+ })
+
+ write(expected, t)
+})
+
+test('does not soft-end on `undefined`', function(assert) {
+ var stream = through()
+ , count = 0
+
+ stream.on('data', function (data) {
+ count++
+ })
+
+ stream.write(undefined)
+ stream.write(undefined)
+
+ assert.equal(count, 2)
+
+ assert.end()
+})
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/package.json
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/package.json b/node_modules/couchbulkimporter/node_modules/JSONStream/package.json
new file mode 100644
index 0000000..4d094bb
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/package.json
@@ -0,0 +1,72 @@
+{
+ "name": "JSONStream",
+ "version": "1.0.6",
+ "description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
+ "homepage": "http://github.com/dominictarr/JSONStream",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/dominictarr/JSONStream.git"
+ },
+ "license": "(MIT OR Apache-2.0)",
+ "keywords": [
+ "json",
+ "stream",
+ "streaming",
+ "parser",
+ "async",
+ "parsing"
+ ],
+ "dependencies": {
+ "jsonparse": "^1.1.0",
+ "through": ">=2.2.7 <3"
+ },
+ "devDependencies": {
+ "it-is": "~1",
+ "assertions": "~2.2.2",
+ "render": "~0.1.1",
+ "trees": "~0.0.3",
+ "event-stream": "~0.7.0",
+ "tape": "~2.12.3"
+ },
+ "bin": {
+ "JSONStream": "./index.js"
+ },
+ "author": {
+ "name": "Dominic Tarr",
+ "email": "dominic.tarr@gmail.com",
+ "url": "http://bit.ly/dominictarr"
+ },
+ "scripts": {
+ "test": "set -e; for t in test/*.js; do echo '***' $t '***'; node $t; done"
+ },
+ "optionalDependencies": {},
+ "engines": {
+ "node": "*"
+ },
+ "gitHead": "4aef6e50ec4f2bc84fdf2370e43b2e62cd8e534c",
+ "bugs": {
+ "url": "https://github.com/dominictarr/JSONStream/issues"
+ },
+ "_id": "JSONStream@1.0.6",
+ "_shasum": "7fa56d971a69c97b7f9db942f441a68a2187da3a",
+ "_from": "JSONStream@>=1.0.3 <1.1.0",
+ "_npmVersion": "3.3.1",
+ "_nodeVersion": "2.3.1",
+ "_npmUser": {
+ "name": "dominictarr",
+ "email": "dominic.tarr@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "dominictarr",
+ "email": "dominic.tarr@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "7fa56d971a69c97b7f9db942f441a68a2187da3a",
+ "tarball": "http://registry.npmjs.org/JSONStream/-/JSONStream-1.0.6.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.0.6.tgz",
+ "readme": "ERROR: No README data found!"
+}
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/readme.markdown
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/readme.markdown b/node_modules/couchbulkimporter/node_modules/JSONStream/readme.markdown
new file mode 100644
index 0000000..4a6531a
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/readme.markdown
@@ -0,0 +1,172 @@
+# JSONStream
+
+streaming JSON.parse and stringify
+
+![](https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master)
+
+## example
+
+``` js
+
+var request = require('request')
+ , JSONStream = require('JSONStream')
+ , es = require('event-stream')
+
+request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
+ .pipe(JSONStream.parse('rows.*'))
+ .pipe(es.mapSync(function (data) {
+ console.error(data)
+ return data
+ }))
+```
+
+## JSONStream.parse(path)
+
+parse stream of values that match a path
+
+``` js
+ JSONStream.parse('rows.*.doc')
+```
+
+The `..` operator is the recursive descent operator from [JSONPath](http://goessner.net/articles/JsonPath/), which will match a child at any depth (see examples below).
+
+If your keys have keys that include `.` or `*` etc, use an array instead.
+`['row', true, /^doc/]`.
+
+If you use an array, `RegExp`s, booleans, and/or functions. The `..` operator is also available in array representation, using `{recurse: true}`.
+any object that matches the path will be emitted as 'data' (and `pipe`d down stream)
+
+If `path` is empty or null, no 'data' events are emitted.
+
+### Examples
+
+query a couchdb view:
+
+``` bash
+curl -sS localhost:5984/tests/_all_docs&include_docs=true
+```
+you will get something like this:
+
+``` js
+{"total_rows":129,"offset":0,"rows":[
+ { "id":"change1_0.6995461115147918"
+ , "key":"change1_0.6995461115147918"
+ , "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
+ , "doc":{
+ "_id": "change1_0.6995461115147918"
+ , "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
+ },
+ { "id":"change2_0.6995461115147918"
+ , "key":"change2_0.6995461115147918"
+ , "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
+ , "doc":{
+ "_id":"change2_0.6995461115147918"
+ , "_rev":"1-13677d36b98c0c075145bb8975105153"
+ , "hello":2
+ }
+ },
+]}
+
+```
+
+we are probably most interested in the `rows.*.doc`
+
+create a `Stream` that parses the documents from the feed like this:
+
+``` js
+var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
+
+stream.on('data', function(data) {
+ console.log('received:', data);
+});
+```
+awesome!
+
+### recursive patterns (..)
+
+`JSONStream.parse('docs..value')`
+(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
+will emit every `value` object that is a child, grand-child, etc. of the
+`docs` object. In this example, it will match exactly 5 times at various depth
+levels, emitting 0, 1, 2, 3 and 4 as results.
+
+```js
+{
+ "total": 5,
+ "docs": [
+ {
+ "key": {
+ "value": 0,
+ "some": "property"
+ }
+ },
+ {"value": 1},
+ {"value": 2},
+ {"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
+ {"value": 4}
+ ]
+}
+```
+
+## JSONStream.parse(pattern, map)
+
+provide a function that can be used to map or filter
+the json output. `map` is passed the value at that node of the pattern,
+if `map` return non-nullish (anything but `null` or `undefined`)
+that value will be emitted in the stream. If it returns a nullish value,
+nothing will be emitted.
+
+## JSONStream.stringify(open, sep, close)
+
+Create a writable stream.
+
+you may pass in custom `open`, `close`, and `seperator` strings.
+But, by default, `JSONStream.stringify()` will create an array,
+(with default options `open='[\n', sep='\n,\n', close='\n]\n'`)
+
+If you call `JSONStream.stringify(false)`
+the elements will only be seperated by a newline.
+
+If you only write one item this will be valid JSON.
+
+If you write many items,
+you can use a `RegExp` to split it into valid chunks.
+
+## JSONStream.stringifyObject(open, sep, close)
+
+Very much like `JSONStream.stringify`,
+but creates a writable stream for objects instead of arrays.
+
+Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
+
+When you `.write()` to the stream you must supply an array with `[ key, data ]`
+as the first argument.
+
+## unix tool
+
+query npm to see all the modules that browserify has ever depended on.
+
+``` bash
+curl https://registry.npmjs.org/browserify | JSONStream 'versions.*.dependencies'
+```
+
+## numbers
+
+There are occasional problems parsing and unparsing very precise numbers.
+
+I have opened an issue here:
+
+https://github.com/creationix/jsonparse/issues/2
+
++1
+
+## Acknowlegements
+
+this module depends on https://github.com/creationix/jsonparse
+by Tim Caswell
+and also thanks to Florent Jaby for teaching me about parsing with:
+https://github.com/Floby/node-json-streams
+
+## license
+
+Dual-licensed under the MIT License or the Apache License, version 2.0
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/test/bool.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/bool.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/bool.js
new file mode 100644
index 0000000..6c386d6
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/test/bool.js
@@ -0,0 +1,41 @@
+
+var fs = require ('fs')
+ , join = require('path').join
+ , file = join(__dirname, 'fixtures','all_npm.json')
+ , JSONStream = require('../')
+ , it = require('it-is').style('colour')
+
+ function randomObj () {
+ return (
+ Math.random () < 0.4
+ ? {hello: 'eonuhckmqjk',
+ whatever: 236515,
+ lies: true,
+ nothing: [null],
+// stuff: [Math.random(),Math.random(),Math.random()]
+ }
+ : ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
+ )
+ }
+
+var expected = []
+ , stringify = JSONStream.stringify()
+ , es = require('event-stream')
+ , stringified = ''
+ , called = 0
+ , count = 10
+ , ended = false
+
+while (count --)
+ expected.push(randomObj())
+
+ es.connect(
+ es.readArray(expected),
+ stringify,
+ JSONStream.parse([true]),
+ es.writeArray(function (err, lines) {
+
+ it(lines).has(expected)
+ console.error('PASSED')
+ })
+ )
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/test/browser.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/browser.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/browser.js
new file mode 100644
index 0000000..3c28d49
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/test/browser.js
@@ -0,0 +1,18 @@
+var test = require('tape')
+var JSONStream = require('../')
+var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'
+
+test('basic parsing', function (t) {
+ t.plan(2)
+ var parsed = JSONStream.parse("rows.*")
+ var parsedKeys = {}
+ parsed.on('data', function(match) {
+ parsedKeys[Object.keys(match)[0]] = true
+ })
+ parsed.on('end', function() {
+ t.equal(!!parsedKeys['hello'], true)
+ t.equal(!!parsedKeys['foo'], true)
+ })
+ parsed.write(testData)
+ parsed.end()
+})
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/test/destroy_missing.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/destroy_missing.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/destroy_missing.js
new file mode 100644
index 0000000..315fdc8
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/test/destroy_missing.js
@@ -0,0 +1,27 @@
+var fs = require ('fs');
+var net = require('net');
+var join = require('path').join;
+var file = join(__dirname, 'fixtures','all_npm.json');
+var JSONStream = require('../');
+
+
+var server = net.createServer(function(client) {
+ var parser = JSONStream.parse([]);
+ parser.on('end', function() {
+ console.log('close')
+ console.error('PASSED');
+ server.close();
+ });
+ client.pipe(parser);
+ var n = 4
+ client.on('data', function () {
+ if(--n) return
+ client.end();
+ })
+});
+server.listen(9999);
+
+
+var client = net.connect({ port : 9999 }, function() {
+ fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
+});
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot1.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot1.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot1.js
new file mode 100644
index 0000000..78149b9
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot1.js
@@ -0,0 +1,29 @@
+var fs = require ('fs')
+ , join = require('path').join
+ , file = join(__dirname, 'fixtures','all_npm.json')
+ , JSONStream = require('../')
+ , it = require('it-is')
+
+var expected = JSON.parse(fs.readFileSync(file))
+ , parser = JSONStream.parse('rows..rev')
+ , called = 0
+ , ended = false
+ , parsed = []
+
+fs.createReadStream(file).pipe(parser)
+
+parser.on('data', function (data) {
+ called ++
+ parsed.push(data)
+})
+
+parser.on('end', function () {
+ ended = true
+})
+
+process.on('exit', function () {
+ it(called).equal(expected.rows.length)
+ for (var i = 0 ; i < expected.rows.length ; i++)
+ it(parsed[i]).deepEqual(expected.rows[i].value.rev)
+ console.error('PASSED')
+})
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot2.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot2.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot2.js
new file mode 100644
index 0000000..f99d881
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/test/disabled/doubledot2.js
@@ -0,0 +1,29 @@
+ var fs = require ('fs')
+ , join = require('path').join
+ , file = join(__dirname, 'fixtures','depth.json')
+ , JSONStream = require('../')
+ , it = require('it-is')
+
+ var expected = JSON.parse(fs.readFileSync(file))
+ , parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
+ , called = 0
+ , ended = false
+ , parsed = []
+
+ fs.createReadStream(file).pipe(parser)
+
+ parser.on('data', function (data) {
+ called ++
+ parsed.push(data)
+ })
+
+ parser.on('end', function () {
+ ended = true
+ })
+
+ process.on('exit', function () {
+ it(called).equal(5)
+ for (var i = 0 ; i < 5 ; i++)
+ it(parsed[i]).deepEqual(i)
+ console.error('PASSED')
+ })
http://git-wip-us.apache.org/repos/asf/couchdb-nmo/blob/753f1767/node_modules/couchbulkimporter/node_modules/JSONStream/test/empty.js
----------------------------------------------------------------------
diff --git a/node_modules/couchbulkimporter/node_modules/JSONStream/test/empty.js b/node_modules/couchbulkimporter/node_modules/JSONStream/test/empty.js
new file mode 100644
index 0000000..19e888c
--- /dev/null
+++ b/node_modules/couchbulkimporter/node_modules/JSONStream/test/empty.js
@@ -0,0 +1,44 @@
+var JSONStream = require('../')
+ , stream = require('stream')
+ , it = require('it-is')
+
+var output = [ [], [] ]
+
+var parser1 = JSONStream.parse(['docs', /./])
+parser1.on('data', function(data) {
+ output[0].push(data)
+})
+
+var parser2 = JSONStream.parse(['docs', /./])
+parser2.on('data', function(data) {
+ output[1].push(data)
+})
+
+var pending = 2
+function onend () {
+ if (--pending > 0) return
+ it(output).deepEqual([
+ [], [{hello: 'world'}]
+ ])
+ console.error('PASSED')
+}
+parser1.on('end', onend)
+parser2.on('end', onend)
+
+function makeReadableStream() {
+ var readStream = new stream.Stream()
+ readStream.readable = true
+ readStream.write = function (data) { this.emit('data', data) }
+ readStream.end = function (data) { this.emit('end') }
+ return readStream
+}
+
+var emptyArray = makeReadableStream()
+emptyArray.pipe(parser1)
+emptyArray.write('{"docs":[]}')
+emptyArray.end()
+
+var objectArray = makeReadableStream()
+objectArray.pipe(parser2)
+objectArray.write('{"docs":[{"hello":"world"}]}')
+objectArray.end()