Posted to commits@arrow.apache.org by we...@apache.org on 2017/09/08 23:02:04 UTC

[1/7] arrow git commit: ARROW-1479: [JS] Expand JavaScript implementation [Forced Update!]

Repository: arrow
Updated Branches:
  refs/heads/master 9cab3a2f5 -> 0c8853f90 (forced update)


http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/__snapshots__/table-tests.ts.snap
----------------------------------------------------------------------
diff --git a/js/test/__snapshots__/table-tests.ts.snap b/js/test/__snapshots__/table-tests.ts.snap
new file mode 100644
index 0000000..a7fb9c5
--- /dev/null
+++ b/js/test/__snapshots__/table-tests.ts.snap
@@ -0,0 +1,1815 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`dictionary file Arrow Table creates a Table from Arrow buffers 1`] = `"example-csv"`;
+
+exports[`dictionary file Arrow Table creates a Table from Arrow buffers 2`] = `"Struct_"`;
+
+exports[`dictionary file Arrow Table creates a Table from Arrow buffers 3`] = `3`;
+
+exports[`dictionary file Arrow Table creates a Table from Arrow buffers 4`] = `
+Array [
+  "Hermione",
+  25,
+  Float32Array [
+    -53.235599517822266,
+    40.231998443603516,
+  ],
+]
+`;
+
+exports[`dictionary file Arrow Table creates a Table from Arrow buffers 5`] = `
+Array [
+  "Severus",
+  30,
+  Float32Array [
+    -62.22999954223633,
+    3,
+  ],
+]
+`;
+
+exports[`dictionary file Arrow Table creates a Table from Arrow buffers 6`] = `
+Array [
+  "Harry",
+  20,
+  Float32Array [
+    23,
+    -100.23652648925781,
+  ],
+]
+`;
+
+exports[`dictionary file Arrow Table enumerates Table rows 1`] = `
+Object {
+  "example-csv": Array [
+    "Hermione",
+    25,
+    Float32Array [
+      -53.235599517822266,
+      40.231998443603516,
+    ],
+  ],
+}
+`;
+
+exports[`dictionary file Arrow Table enumerates Table rows 2`] = `
+Object {
+  "example-csv": Array [
+    "Severus",
+    30,
+    Float32Array [
+      -62.22999954223633,
+      3,
+    ],
+  ],
+}
+`;
+
+exports[`dictionary file Arrow Table enumerates Table rows 3`] = `
+Object {
+  "example-csv": Array [
+    "Harry",
+    20,
+    Float32Array [
+      23,
+      -100.23652648925781,
+    ],
+  ],
+}
+`;
+
+exports[`dictionary file Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  Array [
+    "Hermione",
+    25,
+    Float32Array [
+      -53.235599517822266,
+      40.231998443603516,
+    ],
+  ],
+]
+`;
+
+exports[`dictionary file Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  Array [
+    "Severus",
+    30,
+    Float32Array [
+      -62.22999954223633,
+      3,
+    ],
+  ],
+]
+`;
+
+exports[`dictionary file Arrow Table enumerates Table rows compact 3`] = `
+Array [
+  Array [
+    "Harry",
+    20,
+    Float32Array [
+      23,
+      -100.23652648925781,
+    ],
+  ],
+]
+`;
+
+exports[`dictionary file Arrow Table toString() prints a pretty Table 1`] = `
+"                                       example-csv
+Hermione,25,-53.235599517822266,40.231998443603516
+                   Severus,30,-62.22999954223633,3
+                   Harry,20,23,-100.23652648925781"
+`;
+
+exports[`dictionary file Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`dictionary file Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,                                        example-csv
+    0, Hermione,25,-53.235599517822266,40.231998443603516
+    1,                    Severus,30,-62.22999954223633,3
+    2,                    Harry,20,23,-100.23652648925781"
+`;
+
+exports[`dictionary stream Arrow Table creates a Table from Arrow buffers 1`] = `"example-csv"`;
+
+exports[`dictionary stream Arrow Table creates a Table from Arrow buffers 2`] = `"Struct_"`;
+
+exports[`dictionary stream Arrow Table creates a Table from Arrow buffers 3`] = `3`;
+
+exports[`dictionary stream Arrow Table creates a Table from Arrow buffers 4`] = `
+Array [
+  "Hermione",
+  25,
+  Float32Array [
+    -53.235599517822266,
+    40.231998443603516,
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow Table creates a Table from Arrow buffers 5`] = `
+Array [
+  "Severus",
+  30,
+  Float32Array [
+    -62.22999954223633,
+    3,
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow Table creates a Table from Arrow buffers 6`] = `
+Array [
+  "Harry",
+  20,
+  Float32Array [
+    23,
+    -100.23652648925781,
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow Table enumerates Table rows 1`] = `
+Object {
+  "example-csv": Array [
+    "Hermione",
+    25,
+    Float32Array [
+      -53.235599517822266,
+      40.231998443603516,
+    ],
+  ],
+}
+`;
+
+exports[`dictionary stream Arrow Table enumerates Table rows 2`] = `
+Object {
+  "example-csv": Array [
+    "Severus",
+    30,
+    Float32Array [
+      -62.22999954223633,
+      3,
+    ],
+  ],
+}
+`;
+
+exports[`dictionary stream Arrow Table enumerates Table rows 3`] = `
+Object {
+  "example-csv": Array [
+    "Harry",
+    20,
+    Float32Array [
+      23,
+      -100.23652648925781,
+    ],
+  ],
+}
+`;
+
+exports[`dictionary stream Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  Array [
+    "Hermione",
+    25,
+    Float32Array [
+      -53.235599517822266,
+      40.231998443603516,
+    ],
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  Array [
+    "Severus",
+    30,
+    Float32Array [
+      -62.22999954223633,
+      3,
+    ],
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow Table enumerates Table rows compact 3`] = `
+Array [
+  Array [
+    "Harry",
+    20,
+    Float32Array [
+      23,
+      -100.23652648925781,
+    ],
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow Table toString() prints a pretty Table 1`] = `
+"                                       example-csv
+Hermione,25,-53.235599517822266,40.231998443603516
+                   Severus,30,-62.22999954223633,3
+                   Harry,20,23,-100.23652648925781"
+`;
+
+exports[`dictionary stream Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`dictionary stream Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,                                        example-csv
+    0, Hermione,25,-53.235599517822266,40.231998443603516
+    1,                    Severus,30,-62.22999954223633,3
+    2,                    Harry,20,23,-100.23652648925781"
+`;
+
+exports[`dictionary2 file Arrow Table creates a Table from Arrow buffers 1`] = `"struct"`;
+
+exports[`dictionary2 file Arrow Table creates a Table from Arrow buffers 2`] = `"Struct_"`;
+
+exports[`dictionary2 file Arrow Table creates a Table from Arrow buffers 3`] = `2`;
+
+exports[`dictionary2 file Arrow Table creates a Table from Arrow buffers 4`] = `
+Array [
+  "a0fb47f9-f8fb-4403-a64a-786d7611f8ef",
+  "Airbus",
+  1502880750,
+  Float32Array [
+    32.45663833618164,
+    1.8712350130081177,
+  ],
+]
+`;
+
+exports[`dictionary2 file Arrow Table creates a Table from Arrow buffers 5`] = `
+Array [
+  "50fb46f4-fefa-42c1-919c-0121974cdd00",
+  "Boeing",
+  1502880750,
+  Float32Array [
+    38.766666412353516,
+    -4.181231498718262,
+  ],
+]
+`;
+
+exports[`dictionary2 file Arrow Table enumerates Table rows 1`] = `
+Object {
+  "struct": Array [
+    "a0fb47f9-f8fb-4403-a64a-786d7611f8ef",
+    "Airbus",
+    1502880750,
+    Float32Array [
+      32.45663833618164,
+      1.8712350130081177,
+    ],
+  ],
+}
+`;
+
+exports[`dictionary2 file Arrow Table enumerates Table rows 2`] = `
+Object {
+  "struct": Array [
+    "50fb46f4-fefa-42c1-919c-0121974cdd00",
+    "Boeing",
+    1502880750,
+    Float32Array [
+      38.766666412353516,
+      -4.181231498718262,
+    ],
+  ],
+}
+`;
+
+exports[`dictionary2 file Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  Array [
+    "a0fb47f9-f8fb-4403-a64a-786d7611f8ef",
+    "Airbus",
+    1502880750,
+    Float32Array [
+      32.45663833618164,
+      1.8712350130081177,
+    ],
+  ],
+]
+`;
+
+exports[`dictionary2 file Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  Array [
+    "50fb46f4-fefa-42c1-919c-0121974cdd00",
+    "Boeing",
+    1502880750,
+    Float32Array [
+      38.766666412353516,
+      -4.181231498718262,
+    ],
+  ],
+]
+`;
+
+exports[`dictionary2 file Arrow Table toString() prints a pretty Table 1`] = `
+"                                                                                      struct
+ a0fb47f9-f8fb-4403-a64a-786d7611f8ef,Airbus,1502880750,32.45663833618164,1.8712350130081177
+50fb46f4-fefa-42c1-919c-0121974cdd00,Boeing,1502880750,38.766666412353516,-4.181231498718262"
+`;
+
+exports[`dictionary2 file Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`dictionary2 file Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,                                                                                       struct
+    0,  a0fb47f9-f8fb-4403-a64a-786d7611f8ef,Airbus,1502880750,32.45663833618164,1.8712350130081177
+    1, 50fb46f4-fefa-42c1-919c-0121974cdd00,Boeing,1502880750,38.766666412353516,-4.181231498718262"
+`;
+
+exports[`multi_dictionary file Arrow Table creates a Table from Arrow buffers 1`] = `"struct"`;
+
+exports[`multi_dictionary file Arrow Table creates a Table from Arrow buffers 2`] = `"Struct_"`;
+
+exports[`multi_dictionary file Arrow Table creates a Table from Arrow buffers 3`] = `2`;
+
+exports[`multi_dictionary file Arrow Table creates a Table from Arrow buffers 4`] = `
+Array [
+  "a0fb47f9-f8fb-4403-a64a-786d7611f8ef",
+  "12345",
+  "Airbus",
+  1502880750,
+  Float32Array [
+    32.45663833618164,
+    1.8712350130081177,
+  ],
+]
+`;
+
+exports[`multi_dictionary file Arrow Table creates a Table from Arrow buffers 5`] = `
+Array [
+  "50fb46f4-fefa-42c1-919c-0121974cdd00",
+  "67890",
+  "Boeing",
+  1502880750,
+  Float32Array [
+    38.766666412353516,
+    -4.181231498718262,
+  ],
+]
+`;
+
+exports[`multi_dictionary file Arrow Table enumerates Table rows 1`] = `
+Object {
+  "struct": Array [
+    "a0fb47f9-f8fb-4403-a64a-786d7611f8ef",
+    "12345",
+    "Airbus",
+    1502880750,
+    Float32Array [
+      32.45663833618164,
+      1.8712350130081177,
+    ],
+  ],
+}
+`;
+
+exports[`multi_dictionary file Arrow Table enumerates Table rows 2`] = `
+Object {
+  "struct": Array [
+    "50fb46f4-fefa-42c1-919c-0121974cdd00",
+    "67890",
+    "Boeing",
+    1502880750,
+    Float32Array [
+      38.766666412353516,
+      -4.181231498718262,
+    ],
+  ],
+}
+`;
+
+exports[`multi_dictionary file Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  Array [
+    "a0fb47f9-f8fb-4403-a64a-786d7611f8ef",
+    "12345",
+    "Airbus",
+    1502880750,
+    Float32Array [
+      32.45663833618164,
+      1.8712350130081177,
+    ],
+  ],
+]
+`;
+
+exports[`multi_dictionary file Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  Array [
+    "50fb46f4-fefa-42c1-919c-0121974cdd00",
+    "67890",
+    "Boeing",
+    1502880750,
+    Float32Array [
+      38.766666412353516,
+      -4.181231498718262,
+    ],
+  ],
+]
+`;
+
+exports[`multi_dictionary file Arrow Table toString() prints a pretty Table 1`] = `
+"                                                                                            struct
+ a0fb47f9-f8fb-4403-a64a-786d7611f8ef,12345,Airbus,1502880750,32.45663833618164,1.8712350130081177
+50fb46f4-fefa-42c1-919c-0121974cdd00,67890,Boeing,1502880750,38.766666412353516,-4.181231498718262"
+`;
+
+exports[`multi_dictionary file Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`multi_dictionary file Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,                                                                                             struct
+    0,  a0fb47f9-f8fb-4403-a64a-786d7611f8ef,12345,Airbus,1502880750,32.45663833618164,1.8712350130081177
+    1, 50fb46f4-fefa-42c1-919c-0121974cdd00,67890,Boeing,1502880750,38.766666412353516,-4.181231498718262"
+`;
+
+exports[`multipart count Arrow Table creates a Table from Arrow buffers 1`] = `"row_count"`;
+
+exports[`multipart count Arrow Table creates a Table from Arrow buffers 2`] = `"Int"`;
+
+exports[`multipart count Arrow Table creates a Table from Arrow buffers 3`] = `1`;
+
+exports[`multipart count Arrow Table creates a Table from Arrow buffers 4`] = `10000`;
+
+exports[`multipart count Arrow Table enumerates Table rows 1`] = `
+Object {
+  "row_count": 10000,
+}
+`;
+
+exports[`multipart count Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  10000,
+]
+`;
+
+exports[`multipart count Arrow Table toString() prints a pretty Table 1`] = `
+"row_count
+    10000"
+`;
+
+exports[`multipart count Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`multipart count Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index, row_count
+    0,     10000"
+`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 1`] = `"origin_lat"`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 2`] = `"FloatingPoint"`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 3`] = `5`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 4`] = `35.393089294433594`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 5`] = `35.393089294433594`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 6`] = `35.393089294433594`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 7`] = `29.533695220947266`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 8`] = `29.533695220947266`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 9`] = `"origin_lon"`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 10`] = `"FloatingPoint"`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 11`] = `5`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 12`] = `-97.6007308959961`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 13`] = `-97.6007308959961`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 14`] = `-97.6007308959961`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 15`] = `-98.46977996826172`;
+
+exports[`multipart latlong Arrow Table creates a Table from Arrow buffers 16`] = `-98.46977996826172`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows 1`] = `
+Object {
+  "origin_lat": 35.393089294433594,
+  "origin_lon": -97.6007308959961,
+}
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows 2`] = `
+Object {
+  "origin_lat": 35.393089294433594,
+  "origin_lon": -97.6007308959961,
+}
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows 3`] = `
+Object {
+  "origin_lat": 35.393089294433594,
+  "origin_lon": -97.6007308959961,
+}
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows 4`] = `
+Object {
+  "origin_lat": 29.533695220947266,
+  "origin_lon": -98.46977996826172,
+}
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows 5`] = `
+Object {
+  "origin_lat": 29.533695220947266,
+  "origin_lon": -98.46977996826172,
+}
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  35.393089294433594,
+  -97.6007308959961,
+]
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  35.393089294433594,
+  -97.6007308959961,
+]
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows compact 3`] = `
+Array [
+  35.393089294433594,
+  -97.6007308959961,
+]
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows compact 4`] = `
+Array [
+  29.533695220947266,
+  -98.46977996826172,
+]
+`;
+
+exports[`multipart latlong Arrow Table enumerates Table rows compact 5`] = `
+Array [
+  29.533695220947266,
+  -98.46977996826172,
+]
+`;
+
+exports[`multipart latlong Arrow Table toString() prints a pretty Table 1`] = `
+"        origin_lat,         origin_lon
+35.393089294433594,  -97.6007308959961
+35.393089294433594,  -97.6007308959961
+35.393089294433594,  -97.6007308959961
+29.533695220947266, -98.46977996826172
+29.533695220947266, -98.46977996826172"
+`;
+
+exports[`multipart latlong Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`multipart latlong Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,         origin_lat,         origin_lon
+    0, 35.393089294433594,  -97.6007308959961
+    1, 35.393089294433594,  -97.6007308959961
+    2, 35.393089294433594,  -97.6007308959961
+    3, 29.533695220947266, -98.46977996826172
+    4, 29.533695220947266, -98.46977996826172"
+`;
+
+exports[`multipart origins Arrow Table creates a Table from Arrow buffers 1`] = `"origin_city"`;
+
+exports[`multipart origins Arrow Table creates a Table from Arrow buffers 2`] = `"Utf8"`;
+
+exports[`multipart origins Arrow Table creates a Table from Arrow buffers 3`] = `5`;
+
+exports[`multipart origins Arrow Table creates a Table from Arrow buffers 4`] = `"Oklahoma City"`;
+
+exports[`multipart origins Arrow Table creates a Table from Arrow buffers 5`] = `"Oklahoma City"`;
+
+exports[`multipart origins Arrow Table creates a Table from Arrow buffers 6`] = `"Oklahoma City"`;
+
+exports[`multipart origins Arrow Table creates a Table from Arrow buffers 7`] = `"San Antonio"`;
+
+exports[`multipart origins Arrow Table creates a Table from Arrow buffers 8`] = `"San Antonio"`;
+
+exports[`multipart origins Arrow Table enumerates Table rows 1`] = `
+Object {
+  "origin_city": "Oklahoma City",
+}
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows 2`] = `
+Object {
+  "origin_city": "Oklahoma City",
+}
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows 3`] = `
+Object {
+  "origin_city": "Oklahoma City",
+}
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows 4`] = `
+Object {
+  "origin_city": "San Antonio",
+}
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows 5`] = `
+Object {
+  "origin_city": "San Antonio",
+}
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  "Oklahoma City",
+]
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  "Oklahoma City",
+]
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows compact 3`] = `
+Array [
+  "Oklahoma City",
+]
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows compact 4`] = `
+Array [
+  "San Antonio",
+]
+`;
+
+exports[`multipart origins Arrow Table enumerates Table rows compact 5`] = `
+Array [
+  "San Antonio",
+]
+`;
+
+exports[`multipart origins Arrow Table toString() prints a pretty Table 1`] = `
+"  origin_city
+Oklahoma City
+Oklahoma City
+Oklahoma City
+  San Antonio
+  San Antonio"
+`;
+
+exports[`multipart origins Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`multipart origins Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,   origin_city
+    0, Oklahoma City
+    1, Oklahoma City
+    2, Oklahoma City
+    3,   San Antonio
+    4,   San Antonio"
+`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 1`] = `"foo"`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 2`] = `"Int"`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 3`] = `5`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 4`] = `1`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 5`] = `null`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 6`] = `3`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 7`] = `4`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 8`] = `5`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 9`] = `"bar"`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 10`] = `"FloatingPoint"`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 11`] = `5`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 12`] = `1`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 13`] = `null`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 14`] = `null`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 15`] = `4`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 16`] = `5`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 17`] = `"baz"`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 18`] = `"Utf8"`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 19`] = `5`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 20`] = `"aa"`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 21`] = `null`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 22`] = `null`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 23`] = `"bbb"`;
+
+exports[`simple file Arrow Table creates a Table from Arrow buffers 24`] = `"cccc"`;
+
+exports[`simple file Arrow Table enumerates Table rows 1`] = `
+Object {
+  "bar": 1,
+  "baz": "aa",
+  "foo": 1,
+}
+`;
+
+exports[`simple file Arrow Table enumerates Table rows 2`] = `
+Object {
+  "bar": null,
+  "baz": null,
+  "foo": null,
+}
+`;
+
+exports[`simple file Arrow Table enumerates Table rows 3`] = `
+Object {
+  "bar": null,
+  "baz": null,
+  "foo": 3,
+}
+`;
+
+exports[`simple file Arrow Table enumerates Table rows 4`] = `
+Object {
+  "bar": 4,
+  "baz": "bbb",
+  "foo": 4,
+}
+`;
+
+exports[`simple file Arrow Table enumerates Table rows 5`] = `
+Object {
+  "bar": 5,
+  "baz": "cccc",
+  "foo": 5,
+}
+`;
+
+exports[`simple file Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  1,
+  1,
+  "aa",
+]
+`;
+
+exports[`simple file Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  null,
+  null,
+  null,
+]
+`;
+
+exports[`simple file Arrow Table enumerates Table rows compact 3`] = `
+Array [
+  3,
+  null,
+  null,
+]
+`;
+
+exports[`simple file Arrow Table enumerates Table rows compact 4`] = `
+Array [
+  4,
+  4,
+  "bbb",
+]
+`;
+
+exports[`simple file Arrow Table enumerates Table rows compact 5`] = `
+Array [
+  5,
+  5,
+  "cccc",
+]
+`;
+
+exports[`simple file Arrow Table toString() prints a pretty Table 1`] = `
+" foo,  bar,  baz
+   1,    1,   aa
+null, null, null
+   3, null, null
+   4,    4,  bbb
+   5,    5, cccc"
+`;
+
+exports[`simple file Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`simple file Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,  foo,  bar,  baz
+    0,    1,    1,   aa
+    1, null, null, null
+    2,    3, null, null
+    3,    4,    4,  bbb
+    4,    5,    5, cccc"
+`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 1`] = `"foo"`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 2`] = `"Int"`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 3`] = `5`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 4`] = `1`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 5`] = `null`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 6`] = `3`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 7`] = `4`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 8`] = `5`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 9`] = `"bar"`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 10`] = `"FloatingPoint"`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 11`] = `5`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 12`] = `1`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 13`] = `null`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 14`] = `null`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 15`] = `4`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 16`] = `5`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 17`] = `"baz"`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 18`] = `"Utf8"`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 19`] = `5`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 20`] = `"aa"`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 21`] = `null`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 22`] = `null`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 23`] = `"bbb"`;
+
+exports[`simple stream Arrow Table creates a Table from Arrow buffers 24`] = `"cccc"`;
+
+exports[`simple stream Arrow Table enumerates Table rows 1`] = `
+Object {
+  "bar": 1,
+  "baz": "aa",
+  "foo": 1,
+}
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows 2`] = `
+Object {
+  "bar": null,
+  "baz": null,
+  "foo": null,
+}
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows 3`] = `
+Object {
+  "bar": null,
+  "baz": null,
+  "foo": 3,
+}
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows 4`] = `
+Object {
+  "bar": 4,
+  "baz": "bbb",
+  "foo": 4,
+}
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows 5`] = `
+Object {
+  "bar": 5,
+  "baz": "cccc",
+  "foo": 5,
+}
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  1,
+  1,
+  "aa",
+]
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  null,
+  null,
+  null,
+]
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows compact 3`] = `
+Array [
+  3,
+  null,
+  null,
+]
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows compact 4`] = `
+Array [
+  4,
+  4,
+  "bbb",
+]
+`;
+
+exports[`simple stream Arrow Table enumerates Table rows compact 5`] = `
+Array [
+  5,
+  5,
+  "cccc",
+]
+`;
+
+exports[`simple stream Arrow Table toString() prints a pretty Table 1`] = `
+" foo,  bar,  baz
+   1,    1,   aa
+null, null, null
+   3, null, null
+   4,    4,  bbb
+   5,    5, cccc"
+`;
+
+exports[`simple stream Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`simple stream Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,  foo,  bar,  baz
+    0,    1,    1,   aa
+    1, null, null, null
+    2,    3, null, null
+    3,    4,    4,  bbb
+    4,    5,    5, cccc"
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 1`] = `"struct_nullable"`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 2`] = `"Struct_"`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 3`] = `17`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 4`] = `null`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 5`] = `
+Array [
+  null,
+  "MhRNxD4",
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 6`] = `
+Array [
+  137773603,
+  "3F9HBxK",
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 7`] = `
+Array [
+  410361374,
+  "aVd88fp",
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 8`] = `null`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 9`] = `
+Array [
+  null,
+  "3loZrRf",
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 10`] = `null`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 11`] = `null`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 12`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 13`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 14`] = `null`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 15`] = `
+Array [
+  null,
+  "78SLiRw",
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 16`] = `null`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 17`] = `null`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 18`] = `
+Array [
+  null,
+  "0ilsf82",
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 19`] = `
+Array [
+  null,
+  "LjS9MbU",
+]
+`;
+
+exports[`struct file Arrow Table creates a Table from Arrow buffers 20`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 1`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 2`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "MhRNxD4",
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 3`] = `
+Object {
+  "struct_nullable": Array [
+    137773603,
+    "3F9HBxK",
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 4`] = `
+Object {
+  "struct_nullable": Array [
+    410361374,
+    "aVd88fp",
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 5`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 6`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "3loZrRf",
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 7`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 8`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 9`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    null,
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 10`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    null,
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 11`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 12`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "78SLiRw",
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 13`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 14`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 15`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "0ilsf82",
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 16`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "LjS9MbU",
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows 17`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    null,
+  ],
+}
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  Array [
+    null,
+    "MhRNxD4",
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 3`] = `
+Array [
+  Array [
+    137773603,
+    "3F9HBxK",
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 4`] = `
+Array [
+  Array [
+    410361374,
+    "aVd88fp",
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 5`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 6`] = `
+Array [
+  Array [
+    null,
+    "3loZrRf",
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 7`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 8`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 9`] = `
+Array [
+  Array [
+    null,
+    null,
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 10`] = `
+Array [
+  Array [
+    null,
+    null,
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 11`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 12`] = `
+Array [
+  Array [
+    null,
+    "78SLiRw",
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 13`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 14`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 15`] = `
+Array [
+  Array [
+    null,
+    "0ilsf82",
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 16`] = `
+Array [
+  Array [
+    null,
+    "LjS9MbU",
+  ],
+]
+`;
+
+exports[`struct file Arrow Table enumerates Table rows compact 17`] = `
+Array [
+  Array [
+    null,
+    null,
+  ],
+]
+`;
+
+exports[`struct file Arrow Table toString() prints a pretty Table 1`] = `
+"  struct_nullable
+             null
+         ,MhRNxD4
+137773603,3F9HBxK
+410361374,aVd88fp
+             null
+         ,3loZrRf
+             null
+             null
+                ,
+                ,
+             null
+         ,78SLiRw
+             null
+             null
+         ,0ilsf82
+         ,LjS9MbU
+                ,"
+`;
+
+exports[`struct file Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`struct file Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,   struct_nullable
+    0,              null
+    1,          ,MhRNxD4
+    2, 137773603,3F9HBxK
+    3, 410361374,aVd88fp
+    4,              null
+    5,          ,3loZrRf
+    6,              null
+    7,              null
+    8,                 ,
+    9,                 ,
+   10,              null
+   11,          ,78SLiRw
+   12,              null
+   13,              null
+   14,          ,0ilsf82
+   15,          ,LjS9MbU
+   16,                 ,"
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 1`] = `"struct_nullable"`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 2`] = `"Struct_"`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 3`] = `17`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 4`] = `null`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 5`] = `
+Array [
+  null,
+  "MhRNxD4",
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 6`] = `
+Array [
+  137773603,
+  "3F9HBxK",
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 7`] = `
+Array [
+  410361374,
+  "aVd88fp",
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 8`] = `null`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 9`] = `
+Array [
+  null,
+  "3loZrRf",
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 10`] = `null`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 11`] = `null`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 12`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 13`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 14`] = `null`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 15`] = `
+Array [
+  null,
+  "78SLiRw",
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 16`] = `null`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 17`] = `null`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 18`] = `
+Array [
+  null,
+  "0ilsf82",
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 19`] = `
+Array [
+  null,
+  "LjS9MbU",
+]
+`;
+
+exports[`struct stream Arrow Table creates a Table from Arrow buffers 20`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 1`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 2`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "MhRNxD4",
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 3`] = `
+Object {
+  "struct_nullable": Array [
+    137773603,
+    "3F9HBxK",
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 4`] = `
+Object {
+  "struct_nullable": Array [
+    410361374,
+    "aVd88fp",
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 5`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 6`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "3loZrRf",
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 7`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 8`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 9`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    null,
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 10`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    null,
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 11`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 12`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "78SLiRw",
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 13`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 14`] = `
+Object {
+  "struct_nullable": null,
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 15`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "0ilsf82",
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 16`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    "LjS9MbU",
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows 17`] = `
+Object {
+  "struct_nullable": Array [
+    null,
+    null,
+  ],
+}
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 1`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 2`] = `
+Array [
+  Array [
+    null,
+    "MhRNxD4",
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 3`] = `
+Array [
+  Array [
+    137773603,
+    "3F9HBxK",
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 4`] = `
+Array [
+  Array [
+    410361374,
+    "aVd88fp",
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 5`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 6`] = `
+Array [
+  Array [
+    null,
+    "3loZrRf",
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 7`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 8`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 9`] = `
+Array [
+  Array [
+    null,
+    null,
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 10`] = `
+Array [
+  Array [
+    null,
+    null,
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 11`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 12`] = `
+Array [
+  Array [
+    null,
+    "78SLiRw",
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 13`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 14`] = `
+Array [
+  null,
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 15`] = `
+Array [
+  Array [
+    null,
+    "0ilsf82",
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 16`] = `
+Array [
+  Array [
+    null,
+    "LjS9MbU",
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table enumerates Table rows compact 17`] = `
+Array [
+  Array [
+    null,
+    null,
+  ],
+]
+`;
+
+exports[`struct stream Arrow Table toString() prints a pretty Table 1`] = `
+"  struct_nullable
+             null
+         ,MhRNxD4
+137773603,3F9HBxK
+410361374,aVd88fp
+             null
+         ,3loZrRf
+             null
+             null
+                ,
+                ,
+             null
+         ,78SLiRw
+             null
+             null
+         ,0ilsf82
+         ,LjS9MbU
+                ,"
+`;
+
+exports[`struct stream Arrow Table toString() prints an empty Table 1`] = `""`;
+
+exports[`struct stream Arrow Table toString({ index: true }) prints a pretty Table with an Index column 1`] = `
+"Index,   struct_nullable
+    0,              null
+    1,          ,MhRNxD4
+    2, 137773603,3F9HBxK
+    3, 410361374,aVd88fp
+    4,              null
+    5,          ,3loZrRf
+    6,              null
+    7,              null
+    8,                 ,
+    9,                 ,
+   10,              null
+   11,          ,78SLiRw
+   12,              null
+   13,              null
+   14,          ,0ilsf82
+   15,          ,LjS9MbU
+   16,                 ,"
+`;

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/file/dictionary.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/file/dictionary.arrow b/js/test/arrows/file/dictionary.arrow
new file mode 100644
index 0000000..34d41db
Binary files /dev/null and b/js/test/arrows/file/dictionary.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/file/dictionary2.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/file/dictionary2.arrow b/js/test/arrows/file/dictionary2.arrow
new file mode 100644
index 0000000..1537f54
Binary files /dev/null and b/js/test/arrows/file/dictionary2.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/file/multi_dictionary.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/file/multi_dictionary.arrow b/js/test/arrows/file/multi_dictionary.arrow
new file mode 100644
index 0000000..113d30d
Binary files /dev/null and b/js/test/arrows/file/multi_dictionary.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/file/simple.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/file/simple.arrow b/js/test/arrows/file/simple.arrow
new file mode 100644
index 0000000..838db6d
Binary files /dev/null and b/js/test/arrows/file/simple.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/file/struct.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/file/struct.arrow b/js/test/arrows/file/struct.arrow
new file mode 100644
index 0000000..3d2c018
Binary files /dev/null and b/js/test/arrows/file/struct.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/multi/count/records.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/multi/count/records.arrow b/js/test/arrows/multi/count/records.arrow
new file mode 100644
index 0000000..00d8837
Binary files /dev/null and b/js/test/arrows/multi/count/records.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/multi/count/schema.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/multi/count/schema.arrow b/js/test/arrows/multi/count/schema.arrow
new file mode 100644
index 0000000..dfd24e9
Binary files /dev/null and b/js/test/arrows/multi/count/schema.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/multi/latlong/records.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/multi/latlong/records.arrow b/js/test/arrows/multi/latlong/records.arrow
new file mode 100644
index 0000000..563d12d
Binary files /dev/null and b/js/test/arrows/multi/latlong/records.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/multi/latlong/schema.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/multi/latlong/schema.arrow b/js/test/arrows/multi/latlong/schema.arrow
new file mode 100644
index 0000000..638b2ab
Binary files /dev/null and b/js/test/arrows/multi/latlong/schema.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/multi/origins/records.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/multi/origins/records.arrow b/js/test/arrows/multi/origins/records.arrow
new file mode 100644
index 0000000..49a8c40
Binary files /dev/null and b/js/test/arrows/multi/origins/records.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/multi/origins/schema.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/multi/origins/schema.arrow b/js/test/arrows/multi/origins/schema.arrow
new file mode 100644
index 0000000..0d10fb0
Binary files /dev/null and b/js/test/arrows/multi/origins/schema.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/stream/dictionary.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/stream/dictionary.arrow b/js/test/arrows/stream/dictionary.arrow
new file mode 100644
index 0000000..17ca48b
Binary files /dev/null and b/js/test/arrows/stream/dictionary.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/stream/simple.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/stream/simple.arrow b/js/test/arrows/stream/simple.arrow
new file mode 100644
index 0000000..2c68c0e
Binary files /dev/null and b/js/test/arrows/stream/simple.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/arrows/stream/struct.arrow
----------------------------------------------------------------------
diff --git a/js/test/arrows/stream/struct.arrow b/js/test/arrows/stream/struct.arrow
new file mode 100644
index 0000000..4e97b70
Binary files /dev/null and b/js/test/arrows/stream/struct.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/reader-tests.ts
----------------------------------------------------------------------
diff --git a/js/test/reader-tests.ts b/js/test/reader-tests.ts
new file mode 100644
index 0000000..a7f9f41
--- /dev/null
+++ b/js/test/reader-tests.ts
@@ -0,0 +1,50 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { readBuffers } from './Arrow';
+import arrowTestConfigurations from './test-config';
+
+for (let [name, ...buffers] of arrowTestConfigurations) {
+    describe(`${name} readBuffers`, () => {
+        test(`enumerates each batch as an Array of Vectors`, () => {
+            expect.hasAssertions();
+            for (let vectors of readBuffers(...buffers)) {
+                for (let vector of vectors) {
+                    expect(vector.name).toMatchSnapshot();
+                    expect(vector.type).toMatchSnapshot();
+                    expect(vector.length).toMatchSnapshot();
+                    for (let i = -1, n = vector.length; ++i < n;) {
+                        expect(vector.get(i)).toMatchSnapshot();
+                    }
+                }
+            }
+        });
+        test(`vector iterators report the same values as get`, () => {
+            expect.hasAssertions();
+            for (let vectors of readBuffers(...buffers)) {
+                for (let vector of vectors) {
+                    let i = -1, n = vector.length;
+                    for (let v of vector) {
+                        expect(++i).toBeLessThan(n);
+                        expect(v).toEqual(vector.get(i));
+                    }
+                    expect(++i).toEqual(n);
+                }
+            }
+        });
+    });
+}
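Note: the reader tests above exercise the low-level readBuffers API. As a rough sketch of consuming it outside the test harness (the './Arrow' import path and the sample file name are illustrative, mirroring what the tests and test-config.ts use), each iteration yields the Vectors for one record batch:

    import * as fs from 'fs';
    import { readBuffers } from './Arrow';

    // Read one of the sample Arrow files added in this commit.
    const buffer = fs.readFileSync('./arrows/file/simple.arrow');
    for (const vectors of readBuffers(buffer)) {
        for (const vector of vectors) {
            // name, type and length are the values the snapshots above capture
            console.log(vector.name, vector.type, vector.length);
            for (let i = -1, n = vector.length; ++i < n;) {
                console.log(vector.get(i));
            }
        }
    }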

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/table-tests.ts
----------------------------------------------------------------------
diff --git a/js/test/table-tests.ts b/js/test/table-tests.ts
new file mode 100644
index 0000000..c840299
--- /dev/null
+++ b/js/test/table-tests.ts
@@ -0,0 +1,88 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Table, readBuffers } from './Arrow';
+import arrowTestConfigurations from './test-config';
+
+for (let [name, ...buffers] of arrowTestConfigurations) {
+    describe(`${name} Table`, () => {
+        test(`creates a Table from Arrow buffers`, () => {
+            expect.hasAssertions();
+            const table = Table.from(...buffers);
+            for (const vector of table.cols()) {
+                expect(vector.name).toMatchSnapshot();
+                expect(vector.type).toMatchSnapshot();
+                expect(vector.length).toMatchSnapshot();
+                for (let i = -1, n = vector.length; ++i < n;) {
+                    expect(vector.get(i)).toMatchSnapshot();
+                }
+            }
+        });
+        test(`vector iterators report the same values as get`, () => {
+            expect.hasAssertions();
+            const table = Table.from(...buffers);
+            for (const vector of table.cols()) {
+                let i = -1, n = vector.length;
+                for (let v of vector) {
+                    expect(++i).toBeLessThan(n);
+                    expect(v).toEqual(vector.get(i));
+                }
+                expect(++i).toEqual(n);
+            }
+        });
+        test(`batch and Table Vectors report the same values`, () => {
+            expect.hasAssertions();
+            let rowsTotal = 0, table = Table.from(...buffers);
+            for (let vectors of readBuffers(...buffers)) {
+                let rowsNow = Math.max(...vectors.map((v) => v.length));
+                for (let vi = -1, vn = vectors.length; ++vi < vn;) {
+                    let v1 = vectors[vi];
+                    let v2 = table.getColumnAt(vi);
+                    expect(v1.name).toEqual(v2.name);
+                    expect(v1.type).toEqual(v2.type);
+                    for (let i = -1, n = v1.length; ++i < n;) {
+                        expect(v1.get(i)).toEqual(v2.get(i + rowsTotal));
+                    }
+                }
+                rowsTotal += rowsNow;
+            }
+        });
+        test(`enumerates Table rows`, () => {
+            expect.hasAssertions();
+            const table = Table.from(...buffers);
+            for (const row of table.rows()) {
+                expect(row).toMatchSnapshot();
+            }
+        });
+        test(`enumerates Table rows compact`, () => {
+            expect.hasAssertions();
+            const table = Table.from(...buffers);
+            for (const row of table.rows(true)) {
+                expect(row).toMatchSnapshot();
+            }
+        });
+        test(`toString() prints an empty Table`, () => {
+            expect(Table.from().toString()).toMatchSnapshot();
+        });
+        test(`toString() prints a pretty Table`, () => {
+            expect(Table.from(...buffers).toString()).toMatchSnapshot();
+        });
+        test(`toString({ index: true }) prints a pretty Table with an Index column`, () => {
+            expect(Table.from(...buffers).toString({ index: true })).toMatchSnapshot();
+        });
+    });
+}
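Note: for the higher-level API, a minimal sketch of the Table calls these tests cover (import path and file name assumed, as above; not a definitive usage guide):

    import * as fs from 'fs';
    import { Table } from './Arrow';

    // Build a Table from one or more Arrow buffers (file or stream format).
    const table = Table.from(fs.readFileSync('./arrows/file/simple.arrow'));

    // Columns are Vectors: name, type, length, and random access via get(i).
    for (const vector of table.cols()) {
        console.log(vector.name, vector.type, vector.length);
    }

    // Rows as objects keyed by column name, or compact positional arrays.
    for (const row of table.rows()) { console.log(row); }
    for (const row of table.rows(true)) { console.log(row); }

    // Pretty-printed output, optionally with an Index column.
    console.log(table.toString({ index: true }));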

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/test-config.ts
----------------------------------------------------------------------
diff --git a/js/test/test-config.ts b/js/test/test-config.ts
new file mode 100644
index 0000000..b31ff11
--- /dev/null
+++ b/js/test/test-config.ts
@@ -0,0 +1,42 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import * as fs from 'fs';
+import * as path from 'path';
+const arrowFormats = ['file', 'stream'];
+const arrowFileNames = ['simple', 'struct', 'dictionary', 'dictionary2', 'multi_dictionary'];
+const multipartArrows = ['count', 'latlong', 'origins'];
+export let arrowTestConfigurations = [];
+
+arrowTestConfigurations = arrowFormats.reduce((configs, format) => {
+    return arrowFileNames.reduce((configs, name) => {
+        const arrowPath = path.resolve(__dirname, `./arrows/${format}/${name}.arrow`);
+        try {
+            const arrowFile = fs.readFileSync(arrowPath);
+            return [...configs, [`${name} ${format} Arrow`, arrowFile]];
+        } catch (e) {}
+        return configs;
+    }, configs);
+}, arrowTestConfigurations);
+
+arrowTestConfigurations = multipartArrows.reduce((configs, folder) => {
+    const schemaPath = path.resolve(__dirname, `./arrows/multi/${folder}/schema.arrow`);
+    const recordsPath = path.resolve(__dirname, `./arrows/multi/${folder}/records.arrow`);
+    return [...configs, [`multipart ${folder} Arrow`, fs.readFileSync(schemaPath), fs.readFileSync(recordsPath)]];
+}, arrowTestConfigurations);
+
+export default arrowTestConfigurations;

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/tsconfig.json
----------------------------------------------------------------------
diff --git a/js/test/tsconfig.json b/js/test/tsconfig.json
new file mode 100644
index 0000000..c1ae204
--- /dev/null
+++ b/js/test/tsconfig.json
@@ -0,0 +1,11 @@
+{
+  "extends": "../tsconfig.json",
+  "include": ["./**/*.ts"],
+  "compilerOptions": {
+    "target": "ESNEXT",
+    "module": "commonjs",
+    "allowJs": true,
+    "importHelpers": false,
+    "noEmitHelpers": false
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/vector-tests.ts
----------------------------------------------------------------------
diff --git a/js/test/vector-tests.ts b/js/test/vector-tests.ts
new file mode 100644
index 0000000..54d50d1
--- /dev/null
+++ b/js/test/vector-tests.ts
@@ -0,0 +1,226 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { vectors } from './Arrow';
+import { flatbuffers } from 'flatbuffers';
+import Long = flatbuffers.Long;
+const BitVector = vectors.BitVector;
+const TypedVector = vectors.TypedVector;
+
+const LongVectors = {
+    Int64Vector: vectors.Int64Vector,
+    Uint64Vector: vectors.Uint64Vector,
+};
+
+const ByteVectors = {
+    Int8Vector: vectors.Int8Vector,
+    Int16Vector: vectors.Int16Vector,
+    Int32Vector: vectors.Int32Vector,
+    Uint8Vector: vectors.Uint8Vector,
+    Uint16Vector: vectors.Uint16Vector,
+    Uint32Vector: vectors.Uint32Vector,
+    Float32Vector: vectors.Float32Vector,
+    Float64Vector: vectors.Float64Vector,
+};
+
+const longVectors = toMap<typeof TypedVector>(vectors, Object.keys(LongVectors));
+const byteVectors = toMap<typeof TypedVector>(vectors, Object.keys(ByteVectors));
+const bytes = Array.from(
+    { length: 5 },
+    () => Uint8Array.from(
+        { length: 64 },
+        () => Math.random() * 255 | 0));
+
+describe(`BitVector`, () => {
+    const vector = new BitVector(new Uint8Array([27, 0, 0, 0, 0, 0, 0, 0]));
+    const values = [true, true, false, true, true, false, false, false];
+    const n = values.length;
+    vector.length = 1;
+    test(`gets expected values`, () => {
+        let i = -1;
+        while (++i < n) {
+            expect(vector.get(i)).toEqual(values[i]);
+        }
+    });
+    test(`iterates expected values`, () => {
+        let i = -1;
+        for (let v of vector) {
+            expect(++i).toBeLessThan(n);
+            expect(v).toEqual(values[i]);
+        }
+    });
+    test(`can set values to true and false`, () => {
+        const v = new BitVector(new Uint8Array([27, 0, 0, 0, 0, 0, 0, 0]));
+        const expected1 = [true, true, false, true, true, false, false, false];
+        const expected2 = [true, true,  true, true, true, false, false, false];
+        const expected3 = [true, true, false, false, false, false, true, true];
+        function validate(expected) {
+            for (let i = -1; ++i < n;) {
+                expect(v.get(i)).toEqual(expected[i]);
+            }
+        }
+        validate(expected1);
+        v.set(2, true);
+        validate(expected2);
+        v.set(2, false);
+        validate(expected1);
+        v.set(3, false);
+        v.set(4, false);
+        v.set(6, true);
+        v.set(7, true);
+        validate(expected3);
+        v.set(3, true);
+        v.set(4, true);
+        v.set(6, false);
+        v.set(7, false);
+        validate(expected1);
+    });
+    test(`packs 0 values`, () => {
+        expect(BitVector.pack([])).toEqual(
+            new Uint8Array([0, 0, 0, 0, 0, 0, 0, 0]));
+    });
+    test(`packs 3 values`, () => {
+        expect(BitVector.pack([
+            true, false, true
+        ])).toEqual(new Uint8Array([5, 0, 0, 0, 0, 0, 0, 0]));
+    });
+    test(`packs 8 values`, () => {
+        expect(BitVector.pack([
+            true, true, false, true, true, false, false, false
+        ])).toEqual(new Uint8Array([27, 0, 0, 0, 0, 0, 0, 0]));
+    });
+    test(`packs 25 values`, () => {
+        expect(BitVector.pack([
+            true, true, false, true, true, false, false, false,
+            false, false, false, true, true, false, true, true,
+            false
+        ])).toEqual(new Uint8Array([27, 216, 0, 0, 0, 0, 0, 0]));
+    });
+    test(`from with boolean Array packs values`, () => {
+        expect(BitVector.from([
+            true, false, true
+        ]).slice()).toEqual(new Uint8Array([5, 0, 0, 0, 0, 0, 0, 0]));
+    });
+});
+
+for (const [VectorName, VectorType] of longVectors) {
+    const ArrayType = VectorType.prototype.arrayType;
+    describe(`${VectorName}`, () => {
+        const values = concatTyped(ArrayType, ...bytes);
+        const bLists = bytes.map((b) => new ArrayType(b.buffer));
+        const vector = new VectorType(null, ...bLists);
+        const n = vector.length = values.length * 0.5;
+        test(`gets expected values`, () => {
+            let i = -1;
+            while (++i < n) {
+                expect(vector.get(i)).toEqual(new Long(
+                    values[i * 2], values[i * 2 + 1]
+                ));
+            }
+        });
+        test(`iterates expected values`, () => {
+            let i = -1;
+            for (let v of vector) {
+                expect(++i).toBeLessThan(n);
+                expect(v).toEqual(new Long(
+                    values[i * 2], values[i * 2 + 1]
+                ));
+            }
+        });
+        test(`slices the entire array`, () => {
+            expect(vector.slice()).toEqual(values);
+        });
+        test(`slice returns a TypedArray`, () => {
+            expect(vector.slice()).toBeInstanceOf(ArrayType);
+        });
+        test(`slices from -20 to length`, () => {
+            expect(vector.slice(-20)).toEqual(values.slice(-40));
+        });
+        test(`slices from 0 to -20`, () => {
+            expect(vector.slice(0, -20)).toEqual(values.slice(0, -40));
+        });
+        test(`slices the array from 0 to length - 20`, () => {
+            expect(vector.slice(0, n - 20)).toEqual(values.slice(0, values.length - 40));
+        });
+        test(`slices the array from 0 to length + 20`, () => {
+            expect(vector.slice(0, n + 20)).toEqual(
+                concatTyped(ArrayType, values, values.slice(0, 40)));
+        });
+    });
+}
+
+for (const [VectorName, VectorType] of byteVectors) {
+    const ArrayType = VectorType.prototype.arrayType;
+    describe(`${VectorName}`, () => {
+        const values = concatTyped(ArrayType, ...bytes);
+        const bLists = bytes.map((b) => new ArrayType(b.buffer));
+        const vector = new VectorType(null, ...bLists);
+        const n = vector.length = values.length;
+        test(`gets expected values`, () => {
+            let i = -1;
+            while (++i < n) {
+                expect(vector.get(i)).toEqual(values[i]);
+            }
+        });
+        test(`iterates expected values`, () => {
+            expect.hasAssertions();
+            let i = -1;
+            for (let v of vector) {
+                expect(++i).toBeLessThan(n);
+                expect(v).toEqual(values[i]);
+            }
+        });
+        test(`slices the entire array`, () => {
+            expect(vector.slice()).toEqual(values);
+        });
+        test(`slice returns a TypedArray`, () => {
+            expect(vector.slice()).toBeInstanceOf(ArrayType);
+        });
+        test(`slices from -20 to length`, () => {
+            expect(vector.slice(-20)).toEqual(values.slice(-20));
+        });
+        test(`slices from 0 to -20`, () => {
+            expect(vector.slice(0, -20)).toEqual(values.slice(0, -20));
+        });
+        test(`slices the array from 0 to length - 20`, () => {
+            expect(vector.slice(0, n - 20)).toEqual(values.slice(0, n - 20));
+        });
+        test(`slices the array from 0 to length + 20`, () => {
+            expect(vector.slice(0, n + 20)).toEqual(
+                concatTyped(ArrayType, values, values.slice(0, 20)));
+        });
+    });
+}
+
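+// Collects the named vector constructors from the `vectors` namespace into a Map keyed by name.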
+function toMap<T>(entries: any, keys: string[]) {
+    return keys.reduce((map, key) => {
+        map.set(key, entries[key] as T);
+        return map;
+    }, new Map<string, T>());
+}
+
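+// Concatenates several buffers into a single typed array of the given element type,
+// producing the flat values the chunked vectors are expected to yield.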
+function concatTyped(ArrayType: any, ...bytes: any[]) {
+    const BPM = ArrayType.BYTES_PER_ELEMENT;
+    return bytes.reduce((v, bytes) => {
+        const l = bytes.byteLength / BPM;
+        const a = new ArrayType(v.length + l);
+        const b = new ArrayType(bytes.buffer);
+        a.set(v);
+        a.set(b, v.length);
+        return a;
+    }, new ArrayType(0)) as Array<number>;
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig.json
----------------------------------------------------------------------
diff --git a/js/tsconfig.json b/js/tsconfig.json
index f2ad0e8..a56166b 100644
--- a/js/tsconfig.json
+++ b/js/tsconfig.json
@@ -1,14 +1,11 @@
 {
-  "compilerOptions": {
-    "module": "commonjs",
-    "target": "es5",
-    "lib": ["es2015", "dom"],
-    "outDir": "lib",
-    "moduleResolution": "node",
-    "sourceMap": true,
-    "declaration": true
+  "extends": "./tsconfig/tsconfig.base.json",
+  "formatCodeOptions": {
+    "tabSize": 4,
+    "indentSize": 4
   },
-  "include": [
-    "src/*.ts"
-  ]
+  "compilerOptions": {
+    "target": "ESNEXT",
+    "module": "es2015"
+  }
 }

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.base.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.base.json b/js/tsconfig/tsconfig.base.json
new file mode 100644
index 0000000..4723854
--- /dev/null
+++ b/js/tsconfig/tsconfig.base.json
@@ -0,0 +1,25 @@
+{
+  "exclude": ["../node_modules"],
+  "include": ["../src/**/*.ts"],
+  "compilerOptions": {
+    "lib": ["dom", "esnext", "esnext.asynciterable"],
+    "moduleResolution": "node",
+    "sourceMap": false,
+    "inlineSources": true,
+    "inlineSourceMap": true,
+    "declaration": true,
+    "skipLibCheck": true,
+    "importHelpers": true,
+    "noEmitHelpers": true,
+    "noImplicitAny": false,
+    "noEmitOnError": false,
+    "noImplicitThis": true,
+    "noUnusedLocals": true,
+    "removeComments": false,
+    "downlevelIteration": true,
+    "noImplicitUseStrict": true,
+    "preserveConstEnums": false,
+    "noFallthroughCasesInSwitch": true,
+    "forceConsistentCasingInFileNames": true
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.es2015.cjs.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.es2015.cjs.json b/js/tsconfig/tsconfig.es2015.cjs.json
new file mode 100644
index 0000000..7e92d5d
--- /dev/null
+++ b/js/tsconfig/tsconfig.es2015.cjs.json
@@ -0,0 +1,8 @@
+// Compiler configuration to build the ES2015 CommonJS target
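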
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ES2015",
+    "module": "commonjs"
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.es2015.cls.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.es2015.cls.json b/js/tsconfig/tsconfig.es2015.cls.json
new file mode 100644
index 0000000..11ccc04
--- /dev/null
+++ b/js/tsconfig/tsconfig.es2015.cls.json
@@ -0,0 +1,10 @@
+// Compiler configuration to build the ES2015 Closure Compiler target
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ES2015",
+    "module": "es2015",
+    "noEmitHelpers": true,
+    "importHelpers": false
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.es2015.esm.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.es2015.esm.json b/js/tsconfig/tsconfig.es2015.esm.json
new file mode 100644
index 0000000..a8b3454
--- /dev/null
+++ b/js/tsconfig/tsconfig.es2015.esm.json
@@ -0,0 +1,8 @@
+// Compiler configuration to build the ES2015 ESModules target
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ES2015",
+    "module": "es2015"
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.es5.cjs.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.es5.cjs.json b/js/tsconfig/tsconfig.es5.cjs.json
new file mode 100644
index 0000000..cab55c1
--- /dev/null
+++ b/js/tsconfig/tsconfig.es5.cjs.json
@@ -0,0 +1,8 @@
+// Compiler configuration to build the ES5 CommonJS target
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ES5",
+    "module": "commonjs"
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.es5.cls.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.es5.cls.json b/js/tsconfig/tsconfig.es5.cls.json
new file mode 100644
index 0000000..55f7ea5
--- /dev/null
+++ b/js/tsconfig/tsconfig.es5.cls.json
@@ -0,0 +1,10 @@
+// Compiler configuration to build the ES5 Closure Compiler target
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ES5",
+    "module": "es2015",
+    "noEmitHelpers": true,
+    "importHelpers": false
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.es5.esm.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.es5.esm.json b/js/tsconfig/tsconfig.es5.esm.json
new file mode 100644
index 0000000..7780883
--- /dev/null
+++ b/js/tsconfig/tsconfig.es5.esm.json
@@ -0,0 +1,8 @@
+// Compiler configuration to build the ES5 ESModules target
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ES5",
+    "module": "es2015"
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.esnext.cjs.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.esnext.cjs.json b/js/tsconfig/tsconfig.esnext.cjs.json
new file mode 100644
index 0000000..06b2ee5
--- /dev/null
+++ b/js/tsconfig/tsconfig.esnext.cjs.json
@@ -0,0 +1,8 @@
+// Compiler configuration to build the ESNext CommonJS target
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ESNEXT",
+    "module": "commonjs"
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.esnext.cls.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.esnext.cls.json b/js/tsconfig/tsconfig.esnext.cls.json
new file mode 100644
index 0000000..009a5ac
--- /dev/null
+++ b/js/tsconfig/tsconfig.esnext.cls.json
@@ -0,0 +1,10 @@
+// Compiler configuration to build the ESNext Closure Compiler target
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ESNEXT",
+    "module": "es2015",
+    "noEmitHelpers": true,
+    "importHelpers": false
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tsconfig/tsconfig.esnext.esm.json
----------------------------------------------------------------------
diff --git a/js/tsconfig/tsconfig.esnext.esm.json b/js/tsconfig/tsconfig.esnext.esm.json
new file mode 100644
index 0000000..c614f2e
--- /dev/null
+++ b/js/tsconfig/tsconfig.esnext.esm.json
@@ -0,0 +1,8 @@
+// Compiler configuration to build the ESNext ESModules target
+{
+  "extends": "./tsconfig.base.json",
+  "compilerOptions": {
+    "target": "ESNEXT",
+    "module": "es2015"
+  }
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/tslint.json
----------------------------------------------------------------------
diff --git a/js/tslint.json b/js/tslint.json
index 5f2f6cd..705ef8c 100644
--- a/js/tslint.json
+++ b/js/tslint.json
@@ -1,12 +1,39 @@
 {
-    "defaultSeverity": "error",
-    "extends": [
-        "tslint:recommended"
-    ],
-    "jsRules": {},
-    "rules": {
-      "no-bitwise": false,
-      "max-classes-per-file": false
-    },
-    "rulesDirectory": []
-}
+  "rules": {
+    "curly": true,
+    "eofline": false,
+    "align": [true, "parameters"],
+    "class-name": true,
+    "indent": [true, "spaces"],
+    "max-line-length": [false, 150],
+    "no-consecutive-blank-lines": [true],
+    "no-trailing-whitespace": true,
+    "no-duplicate-variable": true,
+    "no-var-keyword": true,
+    "no-empty": false,
+    "no-unused-expression": false,
+    "no-use-before-declare": false,
+    "no-var-requires": true,
+    "no-require-imports": true,
+    "one-line": [true,
+      "check-else",
+      "check-whitespace",
+      "check-open-brace"],
+    "quotemark": [true,
+      "single",
+      "avoid-escape"],
+    "semicolon": [true, "always"],
+    "typedef-whitespace": [true, {
+      "call-signature": "nospace",
+      "index-signature": "nospace",
+      "parameter": "nospace",
+      "property-declaration": "nospace",
+      "variable-declaration": "nospace"
+    }],
+    "whitespace": [true,
+      "check-branch",
+      "check-decl",
+      "check-operator",
+      "check-type"]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/webpack.config.js
----------------------------------------------------------------------
diff --git a/js/webpack.config.js b/js/webpack.config.js
deleted file mode 100644
index aa123bd..0000000
--- a/js/webpack.config.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-var path = require('path');
-var UglifyJSPlugin = require('uglifyjs-webpack-plugin');
-
-module.exports = {
-  entry: {
-    'arrow': './src/arrow.ts',
-    'arrow.min': './src/arrow.ts'
-  },
-  output: {
-    path: path.resolve(__dirname, '_bundles'),
-    filename: '[name].js',
-    libraryTarget: 'umd',
-    library: 'arrow',
-    umdNamedDefine: true
-  },
-  resolve: {
-    extensions: ['.ts', '.js']
-  },
-  devtool: 'source-map',
-  plugins: [
-    new UglifyJSPlugin({
-      minimize: true,
-      sourceMap: true,
-      include: /\.min\.js$/
-    })
-  ],
-  module: {
-    loaders: [{
-      test: /\.ts$/,
-      loader: 'awesome-typescript-loader',
-      exclude: /node_modules/,
-      query: {
-        declaration: false
-      }
-    }]
-  }
-};


[2/7] arrow git commit: ARROW-1479: [JS] Expand JavaScript implementation

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/reader/vector.ts
----------------------------------------------------------------------
diff --git a/js/src/reader/vector.ts b/js/src/reader/vector.ts
new file mode 100644
index 0000000..a3cd798
--- /dev/null
+++ b/js/src/reader/vector.ts
@@ -0,0 +1,271 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { flatbuffers } from 'flatbuffers';
+import { MessageBatch } from './message';
+import { Vector } from '../vector/vector';
+import * as Schema_ from '../format/Schema_generated';
+import { StructVector } from '../vector/struct';
+import { IteratorState, Dictionaries } from './arrow';
+import { DictionaryVector } from '../vector/dictionary';
+import { Utf8Vector, ListVector, FixedSizeListVector } from '../vector/list';
+import {
+    TypedArray, TypedArrayCtor, IntArray, FloatArray,
+    Int8Vector, Int16Vector, Int32Vector, Int64Vector,
+    Uint8Vector, Uint16Vector, Uint32Vector, Uint64Vector,
+    Float32Vector, Float64Vector, IndexVector, DateVector,
+} from '../vector/typed';
+
+import Int = Schema_.org.apache.arrow.flatbuf.Int;
+import Type = Schema_.org.apache.arrow.flatbuf.Type;
+import Field = Schema_.org.apache.arrow.flatbuf.Field;
+import Precision = Schema_.org.apache.arrow.flatbuf.Precision;
+import VectorType = Schema_.org.apache.arrow.flatbuf.VectorType;
+import VectorLayout = Schema_.org.apache.arrow.flatbuf.VectorLayout;
+import FixedSizeList = Schema_.org.apache.arrow.flatbuf.FixedSizeList;
+import FloatingPoint = Schema_.org.apache.arrow.flatbuf.FloatingPoint;
+import DictionaryEncoding = Schema_.org.apache.arrow.flatbuf.DictionaryEncoding;
+
+export function readVector(field: Field, batch: MessageBatch, state: IteratorState, dictionaries: Dictionaries) {
+    return readDictionaryVector(field, batch, state, dictionaries) ||
+                readTypedVector(field, batch, state, dictionaries);
+}
+
+function readTypedVector(field: Field, batch: MessageBatch, iterator: IteratorState, dictionaries: Dictionaries) {
+    let typeType = field.typeType(), readTyped = typedVectorReaders[typeType];
+    if (!readTyped) {
+        throw new Error('Unrecognized vector name "' + Type[typeType] + '" type "' + typeType + '"');
+    }
+    return readTyped(field, batch, iterator, dictionaries);
+}
+
+function readDictionaryVector(field: Field, batch: MessageBatch, iterator: IteratorState, dictionaries: Dictionaries) {
+    let encoding: DictionaryEncoding;
+    if (dictionaries && (encoding = field.dictionary())) {
+        let id = encoding.id().toFloat64().toString();
+        let fieldType =  encoding.indexType() ||
+            /* a dictionary index defaults to signed 32 bit int if unspecified */
+            { bitWidth: () => 32, isSigned: () => true };
+        let indexField = createSyntheticDictionaryIndexField(field, fieldType);
+        let index = readIntVector(indexField, batch, iterator, null, fieldType);
+        return DictionaryVector.create(field, index.length, index, dictionaries[id]);
+    }
+}
+
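+// Lookup tables of TypedArray constructors, indexed by bit width through valueForBitWidth
+// (8, 16, 32, 64); 64-bit integer data is viewed through 32-bit arrays.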
+const IntViews    = [Int8Array,    Int16Array,   Int32Array,   Int32Array  ];
+const Int32Views  = [Int32Array,   Int32Array,   Int32Array,   Int32Array  ];
+const UintViews   = [Uint8Array,   Uint16Array,  Uint32Array,  Uint32Array ];
+const Uint8Views  = [Uint8Array,   Uint8Array,   Uint8Array,   Uint8Array  ];
+const Uint32Views = [Uint32Array,  Uint32Array,  Uint32Array,  Uint32Array ];
+const FloatViews  = [Int8Array,    Int16Array,   Float32Array, Float64Array];
+
+const createIntDataViews = createDataView.bind(null, IntViews, null);
+const createUintDataViews = createDataView.bind(null, UintViews, null);
+const createDateDataViews = createDataView.bind(null, Uint32Views, null);
+const createFloatDataViews = createDataView.bind(null, FloatViews, null);
+const createNestedDataViews = createDataView.bind(null, Uint32Views, null);
+const createValidityDataViews = createDataView.bind(null, Uint8Views, null);
+const createUtf8DataViews = createDataView.bind(null, Uint8Views, Int32Views);
+
+const floatVectors = {
+    [Precision.SINGLE]: Float32Vector,
+    [Precision.DOUBLE]: Float64Vector
+};
+const intVectors = [
+    [/* unsigned */ Uint8Vector,   /* signed */ Int8Vector ],
+    [/* unsigned */ Uint16Vector,  /* signed */ Int16Vector],
+    [/* unsigned */ Uint32Vector,  /* signed */ Int32Vector],
+    [/* unsigned */ Uint64Vector,  /* signed */ Int64Vector]
+];
+
+function readIntVector(field: Field, batch: MessageBatch, iterator: IteratorState, dictionaries: Dictionaries, fieldType?: FieldType) {
+    let type = (fieldType || field.type(new Int()));
+    return type.isSigned() ?
+        read_IntVector(field, batch, iterator, dictionaries, type) :
+        readUintVector(field, batch, iterator, dictionaries, type);
+}
+
+const read_IntVector = readVectorLayout<number, IntArray>(createIntDataViews, createIntVector);
+const readUintVector = readVectorLayout<number, IntArray>(createUintDataViews, createIntVector);
+function createIntVector(field, length, data, validity, offsets, fieldType, batch, iterator, dictionaries) {
+    let type = fieldType || field.type(new Int()), bitWidth = type.bitWidth();
+    let Vector = valueForBitWidth(bitWidth, intVectors)[+type.isSigned()];
+    return Vector.create(field, length, validity, data || offsets);
+    // ---------------------- so this is kinda strange 👆:
+    // The dictionary-encoded vectors I generated from sample mapd-core queries have the indices' data buffers
+    // tagged as VectorType.OFFSET (0) in the field metadata. The current TS impl ignores buffers' layout type,
+    // and assumes the second buffer is the data for a NullableIntVector. Since we've been stricter about enforcing
+    // the Arrow spec while parsing, the IntVector's data buffer reads empty in this case. If so, fallback to using
+    // the offsets buffer as the data, since IntVectors don't have offsets.
+}
+
+const readFloatVector = readVectorLayout<number, FloatArray>(
+    createFloatDataViews,
+    (field, length, data, validity, offsets, fieldType, batch, iterator, dictionaries) => {
+        let type = field.type(new FloatingPoint());
+        let Vector = floatVectors[type.precision()];
+        return Vector.create(field, length, validity, data);
+    }
+);
+
+const readDateVector = readVectorLayout<Date, Uint32Array>(
+    createDateDataViews,
+    (field, length, data, validity, offsets, fieldType, batch, iterator, dictionaries) => {
+        return DateVector.create(field, length, validity, data);
+    }
+);
+
+const readUtf8Vector = readVectorLayout<string, Uint8Array>(
+    createUtf8DataViews,
+    (field, length, data, validity, offsets, fieldType, batch, iterator, dictionaries) => {
+        let offsetsAdjusted = new Int32Array(offsets.buffer, offsets.byteOffset, length + 1);
+        return Utf8Vector.create(
+            field, length, validity,
+            Uint8Vector.create(field, data.length, null, data),
+            IndexVector.create(field, length + 1, null, offsetsAdjusted)
+        );
+    }
+);
+
+const readListVector = readVectorLayout<any[], Uint32Array>(
+    createNestedDataViews,
+    (field, length, data, validity, offsets, fieldType, batch, iterator, dictionaries) => {
+        let offsetsAdjusted = new Int32Array(offsets.buffer, offsets.byteOffset, length + 1);
+        return ListVector.create(
+            field, length, validity,
+            readVector(field.children(0), batch, iterator, dictionaries),
+            IndexVector.create(field, length + 1, null, offsetsAdjusted)
+        );
+    }
+);
+
+const readFixedSizeListVector = readVectorLayout<any[], Uint32Array>(
+    createNestedDataViews,
+    (field, length, data, validity, offsets, fieldType, batch, iterator, dictionaries) => {
+        let size = field.type(new FixedSizeList()).listSize();
+        return FixedSizeListVector.create(
+            field, length, size, validity,
+            readVector(field.children(0), batch, iterator, dictionaries)
+        );
+    }
+);
+
+const readStructVector = readVectorLayout<any[], ArrayLike<any>>(
+    createNestedDataViews,
+    (field, length, data, validity, offsets, fieldType, batch, iterator, dictionaries) => {
+        let vectors: Vector<any>[] = [];
+        for (let i = -1, n = field.childrenLength(); ++i < n;) {
+            vectors[i] = readVector(field.children(i), batch, iterator, dictionaries);
+        }
+        return StructVector.create(field, length, validity, ...vectors);
+    }
+);
+
+const typedVectorReaders = {
+    [Type.Int]: readIntVector,
+    [Type.Date]: readDateVector,
+    [Type.List]: readListVector,
+    [Type.Utf8]: readUtf8Vector,
+    [Type.Struct_]: readStructVector,
+    [Type.FloatingPoint]: readFloatVector,
+    [Type.FixedSizeList]: readFixedSizeListVector,
+};
+
+type FieldType = { bitWidth(): number; isSigned(): boolean };
+type dataViewFactory<V = TypedArray> = (batch: MessageBatch, type: VectorType, bitWidth: number, offset: number, length: number) => V;
+type vectorFactory<TList, V = Vector<any>> = (field: Field,
+                                              length: number,
+                                              data: TList,
+                                              nulls: Uint8Array,
+                                              offsets: TypedArray,
+                                              fieldType: FieldType,
+                                              chunk: MessageBatch,
+                                              iterable: IteratorState,
+                                              dictionaries: Dictionaries) => V;
+
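+// Generic buffer-layout reader: walks the field's vector layouts alongside the record
+// batch buffers, materializing DATA, OFFSET and VALIDITY views before handing them to
+// the type-specific vector factory.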
+function readVectorLayout<T, TList>(createDataView: dataViewFactory<TList>, createVector: vectorFactory<TList, Vector<T>>) {
+    return function readLayout(
+            field: Field,
+            chunk: MessageBatch,
+            iterator: IteratorState,
+            dictionaries: Dictionaries,
+            integerFieldType?: FieldType
+    ) {
+        let batch = chunk.data;
+        let layoutLength = field.layoutLength();
+        let node = batch.nodes(iterator.nodeIndex++);
+        let data: TList, offsets: any, validity: Uint8Array;
+        let type, bitWidth, bufferLength, nodeLength = node.length().low;
+        for (let i = -1; ++i < layoutLength;) {
+            let layout = field.layout(i);
+            let buffer = batch.buffers(iterator.bufferIndex++);
+            if ((type = layout.type()) === VectorType.TYPE ||
+                (bufferLength = buffer.length().low) <= 0  ||
+                (bitWidth = layout.bitWidth()) <= 0) {
+                continue;
+            } else if (type === VectorType.DATA) {
+                data = createDataView(chunk, type, bitWidth, buffer.offset().low, bufferLength);
+            } else if (type === VectorType.OFFSET) {
+                offsets = createDataView(chunk, type, bitWidth, buffer.offset().low, bufferLength);
+            } else if (node.nullCount().low > 0) {
+                validity = createValidityDataViews(chunk, type, bitWidth, buffer.offset().low, nodeLength);
+            }
+        }
+        return createVector(field, nodeLength, data, validity, offsets, integerFieldType, chunk, iterator, dictionaries);
+    };
+}
+
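+// Creates a TypedArray view over the message bytes for a single buffer, choosing the
+// view constructor by bit width (and by OFFSET vs. DATA role) and clamping the length
+// to the bytes remaining in the message body.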
+function createDataView(
+    dataViews: TypedArrayCtor<any>[], offsetViews: TypedArrayCtor<any>[] | null,
+    batch: MessageBatch, type: VectorType, bitWidth: number, offset: number, length: number
+) {
+    const buffer = batch.bytes.buffer;
+    const byteLength = buffer.byteLength;
+    const byteOffset = batch.offset + offset;
+    const DataViewType = valueForBitWidth(bitWidth, type === VectorType.OFFSET && offsetViews || dataViews);
+    const dataViewLength = ((byteOffset + length) <= byteLength
+        ? length
+        : byteLength - byteOffset
+    ) / DataViewType['BYTES_PER_ELEMENT'];
+    return new DataViewType(buffer, byteOffset, dataViewLength);
+}
+
+function valueForBitWidth(bitWidth: number, values: any[]) {
+    return values[bitWidth >> 4] || values[3];
+}
+
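+// Builds a stand-in Field whose layout() describes a dictionary index column: an optional
+// VALIDITY layout when the parent field is nullable, followed by a DATA layout at the
+// index type's bit width, so the plain int readers can decode the dictionary keys.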
+function createSyntheticDictionaryIndexField(field: Field, type: FieldType) {
+    let layouts = [];
+    let builder = new flatbuffers.Builder();
+    if (field.nullable()) {
+        VectorLayout.startVectorLayout(builder);
+        VectorLayout.addBitWidth(builder, 8);
+        VectorLayout.addType(builder, VectorType.VALIDITY);
+        builder.finish(VectorLayout.endVectorLayout(builder));
+        layouts.push(VectorLayout.getRootAsVectorLayout(builder.dataBuffer()));
+        builder = new flatbuffers.Builder();
+    }
+    VectorLayout.startVectorLayout(builder);
+    VectorLayout.addBitWidth(builder, type.bitWidth());
+    VectorLayout.addType(builder, VectorType.DATA);
+    builder.finish(VectorLayout.endVectorLayout(builder));
+    layouts.push(VectorLayout.getRootAsVectorLayout(builder.dataBuffer()));
+    return Object.create(field, {
+        layout: { value(i) { return layouts[i]; } },
+        layoutLength: { value() { return layouts.length; } }
+    });
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/table.ts
----------------------------------------------------------------------
diff --git a/js/src/table.ts b/js/src/table.ts
new file mode 100644
index 0000000..999bb24
--- /dev/null
+++ b/js/src/table.ts
@@ -0,0 +1,133 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { readBuffers } from './reader/arrow';
+import { StructVector } from './vector/struct';
+import { Vector, sliceToRangeArgs } from './vector/vector';
+
+export class Table implements Iterable<Map<string, any>> {
+    public length: number;
+    protected _columns: Vector<any>[];
+    protected _columnsMap: { [k: string]: Vector<any> };
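+    // Reads one or more Arrow buffers and concatenates the column vectors of every
+    // record batch into a single logical Table.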
+    static from(...bytes: Array<Uint8Array | Buffer | string>) {
+        let columns: Vector<any>[];
+        for (let vectors of readBuffers(...bytes)) {
+            columns = !columns ? vectors : columns.map((v, i) => v.concat(vectors[i]));
+        }
+        return new Table(columns);
+    }
+    static fromStruct(vector: StructVector) {
+        return new Table((<any> vector).vectors);
+    }
+    constructor(columns: Vector<any>[]) {
+        this._columns = columns || [];
+        this.length = Math.max(...this._columns.map((v) => v.length));
+        this._columnsMap = this._columns.reduce((map, vec) => {
+            return (map[vec.name] = vec) && map || map;
+        }, <any> {});
+    }
+    *[Symbol.iterator]() {
+        for (let cols = this._columns, i = -1, n = this.length; ++i < n;) {
+            yield rowAsMap(i, cols);
+        }
+    }
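+    // Accepts rows(compact?), rows(start, compact?) or rows(start, end, compact?);
+    // a boolean in the start or end position is treated as the compact flag.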
+    *rows(startRow?: number | boolean, endRow?: number | boolean, compact?: boolean) {
+        let start = startRow as number, end = endRow as number;
+        if (typeof startRow === 'boolean') {
+            compact = startRow;
+            start = end;
+            end = undefined;
+        } else if (typeof endRow === 'boolean') {
+            compact = endRow;
+            end = undefined;
+        }
+        let rowIndex = -1, { length } = this;
+        const [rowOffset, rowsTotal] = sliceToRangeArgs(length, start, end);
+        while (++rowIndex < rowsTotal) {
+            yield this.getRow((rowIndex + rowOffset) % length, compact);
+        }
+    }
+    *cols(startCol?: number, endCol?: number) {
+        for (const column of this._columns.slice(startCol, endCol)) {
+            yield column;
+        }
+    }
+    getRow(rowIndex: number, compact?: boolean) {
+        return (compact && rowAsArray || rowAsObject)(rowIndex, this._columns);
+    }
+    getCell(columnName: string, rowIndex: number) {
+        return this.getColumn(columnName).get(rowIndex);
+    }
+    getCellAt(columnIndex: number, rowIndex: number) {
+        return this.getColumnAt(columnIndex).get(rowIndex);
+    }
+    getColumn<T = any>(columnName: string) {
+        return this._columnsMap[columnName] as Vector<T>;
+    }
+    getColumnAt<T = any>(columnIndex: number) {
+        return this._columns[columnIndex] as Vector<T>;
+    }
+    toString({ index = false } = {}) {
+        const { length } = this;
+        if (length <= 0) { return ''; }
+        const maxColumnWidths = [];
+        const rows = new Array(length + 1);
+        rows[0] = this._columns.map((c) => c.name);
+        index && rows[0].unshift('Index');
+        for (let i = -1, n = rows.length - 1; ++i < n;) {
+            rows[i + 1] = this.getRow(i, true);
+            index && rows[i + 1].unshift(i);
+        }
+        // Pass one to convert to strings and count max column widths
+        for (let i = -1, n = rows.length; ++i < n;) {
+            const row = rows[i];
+            for (let j = -1, k = row.length; ++j < k;) {
+                const val = row[j] = `${row[j]}`;
+                maxColumnWidths[j] = !maxColumnWidths[j]
+                    ? val.length
+                    : Math.max(maxColumnWidths[j], val.length);
+            }
+        }
+        // Pass two to pad each one to max column width
+        for (let i = -1, n = rows.length; ++i < n;) {
+            const row = rows[i];
+            for (let j = -1, k = row.length; ++j < k;) {
+                row[j] = leftPad(row[j], ' ', maxColumnWidths[j]);
+            }
+            rows[i] = row.join(', ');
+        }
+        return rows.join('\n');
+    }
+}
+
+Table.prototype.length = 0;
+
+function leftPad(str, fill, n) {
+    return (new Array(n + 1).join(fill) + str).slice(-1 * n);
+}
+
+function rowAsMap(row: number, columns: Vector<any>[]) {
+    return columns.reduce((map, vector) => map.set(vector.name, vector.get(row)), new Map());
+}
+
+function rowAsObject(rowIndex: number, columns: Vector<any>[]) {
+    return columns.reduce((row, vector) => (row[vector.name] = vector.get(rowIndex)) && row || row, Object.create(null));
+}
+
+function rowAsArray(rowIndex: number, columns: Vector<any>[]) {
+    return columns.reduce((row, vector, columnIndex) => (row[columnIndex] = vector.get(rowIndex)) && row || row, new Array(columns.length));
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/types.ts
----------------------------------------------------------------------
diff --git a/js/src/types.ts b/js/src/types.ts
deleted file mode 100644
index c541098..0000000
--- a/js/src/types.ts
+++ /dev/null
@@ -1,597 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { org } from "./Arrow_generated";
-import { BitArray } from "./bitarray";
-
-import { TextDecoder } from "text-encoding";
-
-const Type = org.apache.arrow.flatbuf.Type;
-
-interface IArrayView {
-    slice(start: number, end: number): IArrayView;
-    toString(): string;
-}
-
-interface IViewConstructor<T extends IArrayView> {
-    BYTES_PER_ELEMENT: number;
-    new(buffer: any, offset: number, length: number): T;
-}
-
-export abstract class Vector {
-    /**
-     * Helper function for loading a VALIDITY buffer (for Nullable types)
-     *   bb: flatbuffers.ByteBuffer
-     *   buffer: org.apache.arrow.flatbuf.Buffer
-     */
-    public static loadValidityBuffer(bb, buffer): BitArray {
-        const arrayBuffer = bb.bytes_.buffer;
-        const offset = bb.bytes_.byteOffset + buffer.offset;
-        return new BitArray(arrayBuffer, offset, buffer.length * 8);
-    }
-
-    /**
-     * Helper function for loading an OFFSET buffer
-     *   buffer: org.apache.arrow.flatbuf.Buffer
-     */
-    public static loadOffsetBuffer(bb, buffer): Int32Array {
-        const arrayBuffer = bb.bytes_.buffer;
-        const offset  = bb.bytes_.byteOffset + buffer.offset;
-        const length = buffer.length / Int32Array.BYTES_PER_ELEMENT;
-        return new Int32Array(arrayBuffer, offset, length);
-    }
-
-    public field: any;
-    public name: string;
-    public length: number;
-    public nullCount: number;
-
-    constructor(field) {
-        this.field = field;
-        this.name = field.name();
-    }
-
-    /* Access datum at index i */
-    public abstract get(i);
-    /* Return array representing data in the range [start, end) */
-    public abstract slice(start: number, end: number);
-    /* Return array of child vectors, for container types */
-    public getChildVectors() {
-        return [];
-    }
-
-    /**
-     * Use recordBatch fieldNodes and Buffers to construct this Vector
-     *   bb: flatbuffers.ByteBuffer
-     *   node: org.apache.arrow.flatbuf.FieldNode
-     *   buffers: { offset: number, length: number }[]
-     */
-    public loadData(bb, node, buffers) {
-        this.length = node.length().low;
-        this.nullCount = node.nullCount().low;
-        this.loadBuffers(bb, node, buffers);
-    }
-
-    protected abstract loadBuffers(bb, node, buffers);
-}
-
-class SimpleVector<T extends IArrayView> extends Vector {
-    protected dataView: T;
-    private TypedArray: IViewConstructor<T>;
-
-    constructor(field, TypedArray: IViewConstructor<T>) {
-        super(field);
-        this.TypedArray = TypedArray;
-    }
-
-    public get(i) {
-        return this.dataView[i];
-    }
-
-    public getDataView() {
-        return this.dataView;
-    }
-
-    public toString() {
-        return this.dataView.toString();
-    }
-
-    public slice(start, end) {
-        return this.dataView.slice(start, end);
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.loadDataBuffer(bb, buffers[0]);
-    }
-
-    /*
-     * buffer: org.apache.arrow.flatbuf.Buffer
-     */
-    protected loadDataBuffer(bb, buffer) {
-        const arrayBuffer = bb.bytes_.buffer;
-        const offset  = bb.bytes_.byteOffset + buffer.offset;
-        const length = buffer.length / this.TypedArray.BYTES_PER_ELEMENT;
-        this.dataView = new this.TypedArray(arrayBuffer, offset, length);
-    }
-
-}
-
-class NullableSimpleVector<T extends IArrayView> extends SimpleVector<T> {
-    protected validityView: BitArray;
-
-    public get(i: number) {
-        if (this.validityView.get(i)) {
-            return this.dataView[i];
-        } else {
-          return null;
-        }
-    }
-
-    public getValidityVector() {
-        return this.validityView;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.validityView = Vector.loadValidityBuffer(bb, buffers[0]);
-        this.loadDataBuffer(bb, buffers[1]);
-    }
-}
-
-/* tslint:disable max-line-length */
-class Uint8Vector   extends SimpleVector<Uint8Array>   { constructor(field) { super(field, Uint8Array);   } }
-class Uint16Vector  extends SimpleVector<Uint16Array>  { constructor(field) { super(field, Uint16Array);  } }
-class Uint32Vector  extends SimpleVector<Uint32Array>  { constructor(field) { super(field, Uint32Array);  } }
-class Int8Vector    extends SimpleVector<Uint8Array>   { constructor(field) { super(field, Uint8Array);   } }
-class Int16Vector   extends SimpleVector<Uint16Array>  { constructor(field) { super(field, Uint16Array);  } }
-class Int32Vector   extends SimpleVector<Uint32Array>  { constructor(field) { super(field, Uint32Array);  } }
-class Float32Vector extends SimpleVector<Float32Array> { constructor(field) { super(field, Float32Array); } }
-class Float64Vector extends SimpleVector<Float64Array> { constructor(field) { super(field, Float64Array); } }
-
-class NullableUint8Vector   extends NullableSimpleVector<Uint8Array>   { constructor(field) { super(field, Uint8Array);   } }
-class NullableUint16Vector  extends NullableSimpleVector<Uint16Array>  { constructor(field) { super(field, Uint16Array);  } }
-class NullableUint32Vector  extends NullableSimpleVector<Uint32Array>  { constructor(field) { super(field, Uint32Array);  } }
-class NullableInt8Vector    extends NullableSimpleVector<Uint8Array>   { constructor(field) { super(field, Uint8Array);   } }
-class NullableInt16Vector   extends NullableSimpleVector<Uint16Array>  { constructor(field) { super(field, Uint16Array);  } }
-class NullableInt32Vector   extends NullableSimpleVector<Uint32Array>  { constructor(field) { super(field, Uint32Array);  } }
-class NullableFloat32Vector extends NullableSimpleVector<Float32Array> { constructor(field) { super(field, Float32Array); } }
-class NullableFloat64Vector extends NullableSimpleVector<Float64Array> { constructor(field) { super(field, Float64Array); } }
-/* tslint:enable max-line-length */
-
-class Uint64Vector extends SimpleVector<Uint32Array>  {
-    constructor(field) {
-        super(field, Uint32Array);
-    }
-
-    public get(i: number) {
-        return { low: this.dataView[i * 2], high: this.dataView[(i * 2) + 1] };
-    }
-}
-
-class NullableUint64Vector extends NullableSimpleVector<Uint32Array>  {
-    constructor(field) {
-        super(field, Uint32Array);
-    }
-
-    public get(i: number) {
-        if (this.validityView.get(i)) {
-            return { low: this.dataView[i * 2], high: this.dataView[(i * 2) + 1] };
-        } else {
-          return null;
-        }
-    }
-}
-
-class Int64Vector extends NullableSimpleVector<Uint32Array>  {
-    constructor(field) {
-        super(field, Uint32Array);
-    }
-
-    public get(i: number) {
-        return { low: this.dataView[i * 2], high: this.dataView[(i * 2) + 1] };
-    }
-}
-
-class NullableInt64Vector extends NullableSimpleVector<Uint32Array>  {
-    constructor(field) {
-        super(field, Uint32Array);
-    }
-
-    public get(i: number) {
-        if (this.validityView.get(i)) {
-            return { low: this.dataView[i * 2], high: this.dataView[(i * 2) + 1] };
-        } else {
-          return null;
-        }
-    }
-}
-
-class DateVector extends SimpleVector<Uint32Array> {
-    constructor(field) {
-        super(field, Uint32Array);
-    }
-
-    public get(i) {
-        return new Date(super.get(2 * i + 1) * Math.pow(2, 32) + super.get(2 * i));
-    }
-}
-
-class NullableDateVector extends DateVector {
-    private validityView: BitArray;
-
-    public get(i) {
-        if (this.validityView.get(i)) {
-            return super.get(i);
-        } else {
-            return null;
-        }
-    }
-
-    public getValidityVector() {
-        return this.validityView;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.validityView = Vector.loadValidityBuffer(bb, buffers[0]);
-        this.loadDataBuffer(bb, buffers[1]);
-    }
-}
-
-class Utf8Vector extends SimpleVector<Uint8Array> {
-    private static decoder: TextDecoder = new TextDecoder("utf8");
-
-    protected offsetView: Int32Array;
-
-    constructor(field) {
-        super(field, Uint8Array);
-    }
-
-    public get(i) {
-        return Utf8Vector.decoder.decode(this.dataView.slice(this.offsetView[i], this.offsetView[i + 1]));
-    }
-
-    public slice(start: number, end: number) {
-        const result: string[] = [];
-        for (let i: number = start; i < end; i++) {
-            result.push(this.get(i));
-        }
-        return result;
-    }
-
-    public getOffsetView() {
-        return this.offsetView;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.offsetView = Vector.loadOffsetBuffer(bb, buffers[0]);
-        this.loadDataBuffer(bb, buffers[1]);
-    }
-}
-
-class NullableUtf8Vector extends Utf8Vector {
-    private validityView: BitArray;
-
-    public get(i) {
-        if (this.validityView.get(i)) {
-            return super.get(i);
-        } else {
-            return null;
-        }
-    }
-
-    public getValidityVector() {
-        return this.validityView;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.validityView = Vector.loadValidityBuffer(bb, buffers[0]);
-        this.offsetView = Vector.loadOffsetBuffer(bb, buffers[1]);
-        this.loadDataBuffer(bb, buffers[2]);
-    }
-}
-
-// Nested Types
-class ListVector extends Uint32Vector {
-    private dataVector: Vector;
-
-    constructor(field, dataVector: Vector) {
-        super(field);
-        this.dataVector = dataVector;
-    }
-
-    public getChildVectors() {
-        return [this.dataVector];
-    }
-
-    public get(i) {
-        const offset = super.get(i);
-        if (offset === null) {
-            return null;
-        }
-        const nextOffset = super.get(i + 1);
-        return this.dataVector.slice(offset, nextOffset);
-    }
-
-    public toString() {
-        return "length: " + (this.length);
-    }
-
-    public slice(start: number, end: number) {
-        const result = [];
-        for (let i = start; i < end; i++) {
-            result.push(this.get(i));
-        }
-        return result;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        super.loadBuffers(bb, node, buffers);
-        this.length -= 1;
-    }
-}
-
-class NullableListVector extends ListVector {
-    private validityView: BitArray;
-
-    public get(i) {
-        if (this.validityView.get(i)) {
-            return super.get(i);
-        } else {
-            return null;
-        }
-    }
-
-    public getValidityVector() {
-        return this.validityView;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.validityView = Vector.loadValidityBuffer(bb, buffers[0]);
-        this.loadDataBuffer(bb, buffers[1]);
-        this.length -= 1;
-    }
-}
-
-class FixedSizeListVector extends Vector {
-    public size: number;
-    private dataVector: Vector;
-
-    constructor(field, size: number, dataVector: Vector) {
-        super(field);
-        this.size = size;
-        this.dataVector = dataVector;
-    }
-
-    public getChildVectors() {
-        return [this.dataVector];
-    }
-
-    public get(i: number) {
-        return this.dataVector.slice(i * this.size, (i + 1) * this.size);
-    }
-
-    public slice(start: number, end: number) {
-        const result = [];
-        for (let i = start; i < end; i++) {
-            result.push(this.get(i));
-        }
-        return result;
-    }
-
-    public getListSize() {
-        return this.size;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        // no buffers to load
-    }
-}
-
-class NullableFixedSizeListVector extends FixedSizeListVector {
-    private validityView: BitArray;
-
-    public get(i: number) {
-        if (this.validityView.get(i)) {
-            return super.get(i);
-        } else {
-            return null;
-        }
-    }
-
-    public getValidityVector() {
-        return this.validityView;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.validityView = Vector.loadValidityBuffer(bb, buffers[0]);
-    }
-}
-
-class StructVector extends Vector {
-    private validityView: BitArray;
-    private vectors: Vector[];
-
-    constructor(field, vectors: Vector[]) {
-        super(field);
-        this.vectors = vectors;
-    }
-
-    public getChildVectors() {
-        return this.vectors;
-    }
-
-    public get(i: number) {
-        if (this.validityView.get(i)) {
-          return this.vectors.map((v: Vector) => v.get(i));
-        } else {
-            return null;
-        }
-    }
-
-    public slice(start: number, end: number) {
-        const result = [];
-        for (let i = start; i < end; i++) {
-            result.push(this.get(i));
-        }
-        return result;
-    }
-
-    public getValidityVector() {
-        return this.validityView;
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.validityView = Vector.loadValidityBuffer(bb, buffers[0]);
-    }
-}
-
-class DictionaryVector extends Vector {
-    private indices: Vector;
-    private dictionary: Vector;
-
-    constructor(field, indices: Vector, dictionary: Vector) {
-        super(field);
-        this.indices = indices;
-        this.dictionary = dictionary;
-    }
-
-    public get(i) {
-        const encoded = this.indices.get(i);
-        if (encoded == null) {
-            return null;
-        } else {
-            return this.dictionary.get(encoded);
-        }
-    }
-
-    /** Get the dictionary encoded value */
-    public getEncoded(i) {
-        return this.indices.get(i);
-    }
-
-    public slice(start, end) {
-        return this.indices.slice(start, end); // TODO decode
-    }
-
-    public getChildVectors() {
-        return this.indices.getChildVectors();
-    }
-
-    /** Get the index (encoded) vector */
-    public getIndexVector() {
-        return this.indices;
-    }
-
-    /** Get the dictionary vector */
-    public getDictionaryVector() {
-        return this.dictionary;
-    }
-
-    public toString() {
-        return this.indices.toString();
-    }
-
-    protected loadBuffers(bb, node, buffers) {
-        this.indices.loadData(bb, node, buffers);
-    }
-}
-
-export function vectorFromField(field, dictionaries): Vector {
-    const dictionary = field.dictionary();
-    const nullable = field.nullable();
-    if (dictionary == null) {
-        const typeType = field.typeType();
-        if (typeType === Type.List) {
-            const dataVector = vectorFromField(field.children(0), dictionaries);
-            return nullable ? new NullableListVector(field, dataVector) : new ListVector(field, dataVector);
-        } else if (typeType === Type.FixedSizeList) {
-            const dataVector = vectorFromField(field.children(0), dictionaries);
-            const size = field.type(new org.apache.arrow.flatbuf.FixedSizeList()).listSize();
-            if (nullable) {
-              return new NullableFixedSizeListVector(field, size, dataVector);
-            } else {
-              return new FixedSizeListVector(field, size, dataVector);
-            }
-         } else if (typeType === Type.Struct_) {
-            const vectors: Vector[] = [];
-            for (let i: number = 0; i < field.childrenLength(); i++) {
-                vectors.push(vectorFromField(field.children(i), dictionaries));
-            }
-            return new StructVector(field, vectors);
-        } else {
-            if (typeType === Type.Int) {
-                const type = field.type(new org.apache.arrow.flatbuf.Int());
-                return _createIntVector(field, type.bitWidth(), type.isSigned(), nullable);
-            } else if (typeType === Type.FloatingPoint) {
-                const precision = field.type(new org.apache.arrow.flatbuf.FloatingPoint()).precision();
-                if (precision === org.apache.arrow.flatbuf.Precision.SINGLE) {
-                    return nullable ? new NullableFloat32Vector(field) : new Float32Vector(field);
-                } else if (precision === org.apache.arrow.flatbuf.Precision.DOUBLE) {
-                    return nullable ? new NullableFloat64Vector(field) : new Float64Vector(field);
-                } else {
-                    throw new Error("Unimplemented FloatingPoint precision " + precision);
-                }
-            } else if (typeType === Type.Utf8) {
-                return nullable ? new NullableUtf8Vector(field) : new Utf8Vector(field);
-            } else if (typeType === Type.Date) {
-                return nullable ? new NullableDateVector(field) : new DateVector(field);
-            } else {
-                throw new Error("Unimplemented type " + typeType);
-            }
-        }
-    } else {
-        // determine arrow type - default is signed 32 bit int
-        const type = dictionary.indexType();
-        let bitWidth = 32;
-        let signed = true;
-        if (type != null) {
-            bitWidth = type.bitWidth();
-            signed = type.isSigned();
-        }
-        const indices = _createIntVector(field, bitWidth, signed, nullable);
-        return new DictionaryVector(field, indices, dictionaries[dictionary.id().toFloat64().toString()]);
-    }
-}
-
-function _createIntVector(field, bitWidth, signed, nullable) {
-    if (bitWidth === 64) {
-        if (signed) {
-            return nullable ? new NullableInt64Vector(field) : new Int64Vector(field);
-        } else {
-            return nullable ? new NullableUint64Vector(field) : new Uint64Vector(field);
-        }
-    } else if (bitWidth === 32) {
-        if (signed) {
-            return nullable ? new NullableInt32Vector(field) : new Int32Vector(field);
-        } else {
-            return nullable ? new NullableUint32Vector(field) : new Uint32Vector(field);
-        }
-    } else if (bitWidth === 16) {
-        if (signed) {
-            return nullable ? new NullableInt16Vector(field) : new Int16Vector(field);
-        } else {
-            return nullable ? new NullableUint16Vector(field) : new Uint16Vector(field);
-        }
-    } else if (bitWidth === 8) {
-        if (signed) {
-            return nullable ? new NullableInt8Vector(field) : new Int8Vector(field);
-        } else {
-            return nullable ? new NullableUint8Vector(field) : new Uint8Vector(field);
-        }
-    } else {
-         throw new Error("Unimplemented Int bit width " + bitWidth);
-    }
-}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/vector/dictionary.ts
----------------------------------------------------------------------
diff --git a/js/src/vector/dictionary.ts b/js/src/vector/dictionary.ts
new file mode 100644
index 0000000..de811ea
--- /dev/null
+++ b/js/src/vector/dictionary.ts
@@ -0,0 +1,51 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Vector } from './vector';
+
+export class DictionaryVector<T> extends Vector<T> {
+    protected data: Vector<T>;
+    protected keys: Vector<number>;
+    constructor(index: Vector<number>, dictionary: Vector<T>) {
+        super();
+        this.keys = index;
+        this.data = dictionary;
+        this.length = index && index.length || 0;
+    }
+    index(index: number) {
+        return this.keys.get(index);
+    }
+    value(index: number) {
+        return this.data.get(index);
+    }
+    get(index: number) {
+        return this.value(this.index(index));
+    }
+    concat(vector: DictionaryVector<T>) {
+        return DictionaryVector.from(this,
+            this.length + vector.length,
+            this.keys.concat(vector.keys),
+            this.data
+        );
+    }
+    *[Symbol.iterator]() {
+        let { data } = this;
+        for (const loc of this.keys) {
+            yield data.get(loc);
+        }
+    }
+}
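
A minimal usage sketch of the DictionaryVector above, hand-building the key and dictionary vectors that readBuffers would normally assemble (import paths assume a module sitting under js/src; the validity bitmaps and data are illustrative):

    import { DictionaryVector } from './vector/dictionary';
    import { Utf8Vector } from './vector/list';
    import { Uint8Vector, Int32Vector, IndexVector } from './vector/typed';

    // dictionary entries "foo" and "bar": raw UTF-8 bytes plus value offsets
    const bytes   = new Uint8Vector(new Uint8Array([255]), new Uint8Array([102, 111, 111, 98, 97, 114]));
    const offsets = new IndexVector(new Uint8Array([255]), new Int32Array([0, 3, 6]));
    const values  = new Utf8Vector(new Uint8Array([255]), bytes, offsets);

    // keys reference dictionary entries by position
    const keys = new Int32Vector(new Uint8Array([255]), new Int32Array([0, 1, 1, 0]));
    const dict = new DictionaryVector(keys, values);

    dict.index(2); // 1      -- the raw dictionary key at slot 2
    dict.value(1); // "bar"  -- a dictionary entry by position
    dict.get(2);   // "bar"  -- key lookup followed by dictionary lookup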

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/vector/list.ts
----------------------------------------------------------------------
diff --git a/js/src/vector/list.ts b/js/src/vector/list.ts
new file mode 100644
index 0000000..7360d96
--- /dev/null
+++ b/js/src/vector/list.ts
@@ -0,0 +1,108 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Vector } from './vector';
+import { TextDecoder } from 'text-encoding';
+import { IndexVector, BitVector, ValidityArgs } from './typed';
+
+export class ListVectorBase<T> extends Vector<T> {
+    protected values: Vector<T>;
+    protected offsets: IndexVector;
+    constructor(validity: ValidityArgs, values: Vector<any>, offsets: IndexVector) {
+        super();
+        this.values = values;
+        this.offsets = offsets;
+        validity && (this.validity = BitVector.from(validity));
+    }
+    get(index: number) {
+        let batch, from, to, { offsets } = this;
+        if (!this.validity.get(index) ||
+            /* return null if `to` is null */
+            ((to = offsets.get(index + 1)) === null) || !(
+            /*
+            return null if `batch` is less than 0. this check is placed
+            second to avoid creating the [from, batch] tuple if `to` is null
+            */
+            ([from, batch] = offsets.get(index, true) as number[]) && batch > -1)) {
+            return null;
+        }
+        return this.values.slice(from, to, batch) as any;
+    }
+    concat(vector: ListVectorBase<T>) {
+        return (this.constructor as typeof ListVectorBase).from(this,
+            this.length + vector.length,
+            this.validity.concat(vector.validity),
+            this.values.concat(vector.values),
+            this.offsets.concat(vector.offsets)
+        );
+    }
+    *[Symbol.iterator]() {
+        let v, r1, r2, { values } = this;
+        let it = this.offsets[Symbol.iterator]();
+        let iv = this.validity[Symbol.iterator]();
+        while (!(v = iv.next()).done && !(r1 = it.next()).done && !(r2 = it.next()).done) {
+            yield !v.value ? null : values.slice(r1.value[0], r2.value, r1.value[1]) as any;
+        }
+    }
+}
+
+export class ListVector<T> extends ListVectorBase<T[]> {}
+export class Utf8Vector extends ListVectorBase<string> {
+    protected static decoder = new TextDecoder(`utf-8`);
+    get(index: number) {
+        let chars = super.get(index) as any;
+        return chars ? Utf8Vector.decoder.decode(chars) : null;
+    }
+    *[Symbol.iterator]() {
+        let decoder = Utf8Vector.decoder;
+        for (const chars of super[Symbol.iterator]()) {
+            yield !chars ? null : decoder.decode(chars);
+        }
+    }
+}
+
+export class FixedSizeListVector<T> extends Vector<T[]> {
+    protected size: number;
+    protected values: Vector<T>;
+    constructor(size: number, validity: ValidityArgs, values: Vector<T>) {
+        super();
+        this.values = values;
+        this.size = Math.abs(size | 0) || 1;
+        validity && (this.validity = BitVector.from(validity));
+    }
+    get(index: number) {
+        return !this.validity.get(index) ? null : this.values.slice(
+            this.size * index, this.size * (index + 1)
+        ) as T[];
+    }
+    concat(vector: FixedSizeListVector<T>) {
+        return FixedSizeListVector.from(this,
+            this.length + vector.length,
+            this.size,
+            this.validity.concat(vector.validity),
+            this.values.concat(vector.values)
+        );
+    }
+    *[Symbol.iterator]() {
+        let v, i = -1;
+        let { size, length, values } = this;
+        let iv = this.validity[Symbol.iterator]();
+        while (!(v = iv.next()).done && ++i < length) {
+            yield !v.value ? null : values.slice(size * i, size * (i + 1)) as T[];
+        }
+    }
+}
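
A small sketch of the FixedSizeListVector above, hand-building a vector of [lat, lng] pairs backed by a single contiguous Float32Array (imports assume a module under js/src; the data is illustrative):

    import { FixedSizeListVector } from './vector/list';
    import { Float32Vector } from './vector/typed';

    // two coordinate pairs stored back to back; each logical value is `size` floats wide
    const floats = new Float32Vector(new Uint8Array([255]), new Float32Array([23, -100.25, -62.25, 3]));
    const pairs  = new FixedSizeListVector(2, new Uint8Array([255]), floats);

    pairs.get(0); // Float32Array [23, -100.25]
    pairs.get(1); // Float32Array [-62.25, 3]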

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/vector/struct.ts
----------------------------------------------------------------------
diff --git a/js/src/vector/struct.ts b/js/src/vector/struct.ts
new file mode 100644
index 0000000..e59ac91
--- /dev/null
+++ b/js/src/vector/struct.ts
@@ -0,0 +1,39 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Vector } from './vector';
+import { BitVector, ValidityArgs } from './typed';
+
+export class StructVector extends Vector<any[]> {
+    protected vectors: Vector<any>[];
+    constructor(validity: ValidityArgs, ...vectors: Vector<any>[]) {
+        super();
+        this.vectors = vectors;
+        this.length = Math.max(0, ...vectors.map((v) => v.length));
+        validity && (this.validity = BitVector.from(validity));
+    }
+    get(index: number) {
+        return this.validity.get(index) ? this.vectors.map((v) => v.get(index)) : null;
+    }
+    concat(vector: StructVector) {
+        return StructVector.from(this,
+            this.length + vector.length,
+            this.validity.concat(vector.validity),
+            ...this.vectors.map((v, i) => v.concat(vector.vectors[i]))
+        );
+    }
+}
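
A sketch of how the StructVector above zips its child vectors into row values, with a struct-level validity bitmap controlling which rows are null (imports assume a module under js/src; the data is illustrative):

    import { StructVector } from './vector/struct';
    import { Int32Vector, Float32Vector } from './vector/typed';

    const ids    = new Int32Vector(new Uint8Array([255]), new Int32Array([1, 2, 3]));
    const scores = new Float32Vector(new Uint8Array([255]), new Float32Array([0.5, 1.5, 2.5]));

    // validity byte 0b00000101: rows 0 and 2 are valid, row 1 is null
    const rows = new StructVector(new Uint8Array([5]), ids, scores);

    rows.get(0); // [1, 0.5] -- one value from each child vector
    rows.get(1); // null     -- the struct-level validity bit is unset
    rows.get(2); // [3, 2.5]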

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/vector/typed.ts
----------------------------------------------------------------------
diff --git a/js/src/vector/typed.ts b/js/src/vector/typed.ts
new file mode 100644
index 0000000..b38812e
--- /dev/null
+++ b/js/src/vector/typed.ts
@@ -0,0 +1,326 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Vector } from './vector';
+import { flatbuffers } from 'flatbuffers';
+
+import Long = flatbuffers.Long;
+
+export type VArray<T = any> = {
+    [k: number]: T; length: number;
+    constructor: VArrayCtor<VArray<T>>;
+};
+
+export type VArrayCtor<VArray> = {
+    readonly prototype: VArray;
+    BYTES_PER_ELEMENT?: number;
+    new(...args: any[]): VArray;
+};
+
+export class VirtualVector<T, TArrayType = VArray<T>> extends Vector<T> {
+    protected lists: TArrayType[];
+    protected _arrayType: VArrayCtor<TArrayType>;
+    public get arrayType() { return this._arrayType; }
+    constructor(...lists: TArrayType[]) {
+        super();
+        this.lists = lists.filter(Boolean);
+    }
+    get(index: number): T {
+        /* inlined `findVirtual` impl */
+        let rows, length, lists = this.lists;
+        for (let batch = -1;
+            (rows = lists[++batch]) &&
+            (length = rows.length) <= index &&
+            0 <= (index -= length);) {}
+        return rows && -1 < index ? rows[index] : null;
+    }
+    protected range(from: number, total: number, batch?: number) {
+        /* inlined `findVirtual` impl */
+        let rows, local = from, length;
+        let { lists, _arrayType } = this;
+        for (batch = (batch || 0) - 1;
+            (rows = lists[++batch]) &&
+            (length = rows.length) <= local &&
+            0 <= (local -= length);) {}
+        if (rows && local > -1) {
+            let index = 0, listsLength = lists.length;
+            let set: any = Array.isArray(rows) ? arraySet : typedArraySet;
+            let slice = _arrayType['prototype']['subarray'] || _arrayType['prototype']['slice'];
+            let source = slice.call(rows, local, local + total), target = source;
+            // Perf optimization: if the first slice contains all the values we're looking for,
+            // we don't have to copy values to a target Array. If we're slicing a TypedArray,
+            // this is a significant improvement as we avoid the memcpy 🎉
+            if (source.length < total) {
+                target = new _arrayType(total);
+                while ((index = set(source, target, index)) < total) {
+                    rows = lists[batch = ((batch + 1) % listsLength)];
+                    source = slice.call(rows, 0, Math.min(rows.length, total - index));
+                }
+            }
+            return target as any;
+        }
+        return new _arrayType(0);
+    }
+    *[Symbol.iterator]() {
+        let index = -1, { lists, length } = this;
+        for (let outer = -1, n = lists.length; ++outer < n;) {
+            let list = lists[outer] as any;
+            for (let inner = -1, k = list.length; ++index < length && ++inner < k;) {
+                yield list[inner];
+            }
+        }
+    }
+}
+
+export type ValidityArgs = Vector<boolean> | Uint8Array;
+export class BitVector extends VirtualVector<boolean, Uint8Array> {
+    static constant: Vector<boolean> = new (class ValidVector extends Vector<boolean> {
+        get() { return true; }
+        *[Symbol.iterator]() {
+            do { yield true; } while (true);
+        }
+    })();
+    static from(src: any) {
+        return src instanceof BitVector   ? src
+             : src === BitVector.constant ? src
+             : src instanceof Uint8Array       ? new BitVector(src)
+             : src instanceof Array            ? new BitVector(BitVector.pack(src))
+             : src instanceof Vector           ? new BitVector(BitVector.pack(src))
+                                               : BitVector.constant as Vector<any>;
+    }
+    static pack(values: Iterable<any>) {
+        let xs = [], n, i = 0;
+        let bit = 0, byte = 0;
+        for (const value of values) {
+            value && (byte |= 1 << bit);
+            if (++bit === 8) {
+                xs[i++] = byte;
+                byte = bit = 0;
+            }
+        }
+        if (i === 0 || bit > 0) { xs[i++] = byte; }
+        if (i % 8 && (n = i + 8 - i % 8)) {
+            do { xs[i] = 0; } while (++i < n);
+        }
+        return new Uint8Array(xs);
+    }
+    constructor(...lists: Uint8Array[]) {
+        super(...lists);
+        this.length = this.lists.reduce((l, xs) => l + xs['length'], 0);
+    }
+    get(index: number) {
+        /* inlined `findVirtual` impl */
+        let rows, length, lists = this.lists;
+        for (let batch = -1;
+            (rows = lists[++batch]) &&
+            (length = rows.length) <= index &&
+            0 <= (index -= length);) {}
+        return !(!rows || index < 0 || (rows[index >> 3 | 0] & 1 << index % 8) === 0);
+    }
+    set(index: number, value: boolean) {
+        /* inlined `findVirtual` impl */
+        let rows, length, lists = this.lists;
+        for (let batch = -1;
+            (rows = lists[++batch]) &&
+            (length = rows.length) <= index &&
+            0 <= (index -= length);) {}
+        if (rows && index > -1) {
+            value
+                ? (rows[index >> 3 | 0] |=  (1 << (index % 8)))
+                : (rows[index >> 3 | 0] &= ~(1 << (index % 8)));
+        }
+    }
+    concat(vector: BitVector) {
+        return new BitVector(...this.lists, ...vector.lists);
+    }
+    *[Symbol.iterator]() {
+        for (const byte of super[Symbol.iterator]()) {
+            for (let i = -1; ++i < 8;) {
+                yield (byte & 1 << i) !== 0;
+            }
+        }
+    }
+}
+
+export class TypedVector<T, TArrayType> extends VirtualVector<T, TArrayType> {
+    constructor(validity: ValidityArgs, ...lists: TArrayType[]) {
+        super(...lists);
+        validity && (this.validity = BitVector.from(validity));
+    }
+    concat(vector: TypedVector<T, TArrayType>) {
+        return (this.constructor as typeof TypedVector).from(this,
+            this.length + vector.length,
+            this.validity.concat(vector.validity),
+            ...this.lists, ...vector.lists
+        );
+    }
+}
+
+export class DateVector extends TypedVector<Date, Uint32Array> {
+    get(index: number) {
+        return !this.validity.get(index) ? null : new Date(
+            Math.pow(2, 32) *
+                <any> super.get(2 * index + 1) +
+                <any> super.get(2 * index)
+        );
+    }
+    *[Symbol.iterator]() {
+        let v, low, high;
+        let it = super[Symbol.iterator]();
+        let iv = this.validity[Symbol.iterator]();
+        while (!(v = iv.next()).done && !(low = it.next()).done && !(high = it.next()).done) {
+            yield !v.value ? null : new Date(Math.pow(2, 32) * high.value + low.value);
+        }
+    }
+}
+
+export class IndexVector extends TypedVector<number | number[], Int32Array> {
+    get(index: number, returnWithBatchIndex = false) {
+        /* inlined `findVirtual` impl */
+        let rows, length, batch = -1, lists = this.lists;
+        for (;
+            (rows = lists[++batch]) &&
+            (length = rows.length) <= index &&
+            0 <= (index -= length);) {}
+        return !returnWithBatchIndex
+            ? (rows && -1 < index ? rows[index + batch] : null) as number
+            : (rows && -1 < index ? [rows[index + batch], batch] : [0, -1]) as number[];
+    }
+    *[Symbol.iterator]() {
+        // Alternate between iterating a tuple of [from, batch], and to. The from
+        // and to values are relative to the record batch they're defined in, so
+        // `ListVectorBase` needs to know the right batch to read.
+        let xs = new Int32Array(2), { lists } = this;
+        for (let i = -1, n = lists.length; ++i < n;) {
+            let list = lists[i] as any;
+            for (let j = -1, k = list.length - 1; ++j < k;) {
+                xs[1] = i;
+                xs[0] = list[j];
+                yield xs;
+                yield list[j + 1];
+            }
+        }
+    }
+}
+
+export class ByteVector<TList> extends TypedVector<number, TList> {
+    get(index: number) {
+        return this.validity.get(index) ? super.get(index) : null;
+    }
+    *[Symbol.iterator]() {
+        let v, r, { validity } = this;
+        let it = super[Symbol.iterator]();
+        // fast path the case of no nulls
+        if (validity === BitVector.constant) {
+            yield* it;
+        } else {
+            let iv = validity[Symbol.iterator]();
+            while (!(v = iv.next()).done && !(r = it.next()).done) {
+                yield !v.value ? null : r.value;
+            }
+        }
+    }
+}
+
+export class LongVector<TList> extends TypedVector<Long, TList> {
+    get(index: number) {
+        return !this.validity.get(index) ? null : new Long(
+            <any> super.get(index * 2),     /* low */
+            <any> super.get(index * 2 + 1) /* high */
+        );
+    }
+    *[Symbol.iterator]() {
+        let v, low, high;
+        let it = super[Symbol.iterator]();
+        let iv = this.validity[Symbol.iterator]();
+        while (!(v = iv.next()).done && !(low = it.next()).done && !(high = it.next()).done) {
+            yield !v.value ? null : new Long(low.value, high.value);
+        }
+    }
+}
+
+export class Int8Vector    extends ByteVector<Int8Array>    {}
+export class Int16Vector   extends ByteVector<Int16Array>   {}
+export class Int32Vector   extends ByteVector<Int32Array>   {}
+export class Int64Vector   extends LongVector<Int32Array>   {}
+export class Uint8Vector   extends ByteVector<Uint8Array>   {}
+export class Uint16Vector  extends ByteVector<Uint16Array>  {}
+export class Uint32Vector  extends ByteVector<Uint32Array>  {}
+export class Uint64Vector  extends LongVector<Uint32Array>  {}
+export class Float32Vector extends ByteVector<Float32Array> {}
+export class Float64Vector extends ByteVector<Float64Array> {}
+
+LongVector.prototype.stride = 2;
+(Vector.prototype as any).lists = [];
+(Vector.prototype as any).validity = BitVector.constant;
+(VirtualVector.prototype as any)._arrayType = Array;
+(BitVector.prototype as any)._arrayType = Uint8Array;
+(Int8Vector.prototype as any)._arrayType = Int8Array;
+(Int16Vector.prototype as any)._arrayType = Int16Array;
+(Int32Vector.prototype as any)._arrayType = Int32Array;
+(Int64Vector.prototype as any)._arrayType = Int32Array;
+(Uint8Vector.prototype as any)._arrayType = Uint8Array;
+(Uint16Vector.prototype as any)._arrayType = Uint16Array;
+(Uint32Vector.prototype as any)._arrayType = Uint32Array;
+(Uint64Vector.prototype as any)._arrayType = Uint32Array;
+(DateVector.prototype as any)._arrayType = Uint32Array;
+(IndexVector.prototype as any)._arrayType = Int32Array;
+(Float32Vector.prototype as any)._arrayType = Float32Array;
+(Float64Vector.prototype as any)._arrayType = Float64Array;
+
+function arraySet<T>(source: Array<T>, target: Array<T>, index: number) {
+    for (let i = 0, n = source.length; i < n;) {
+        target[index++] = source[i++];
+    }
+    return index;
+}
+
+function typedArraySet(source: TypedArray, target: TypedArray, index: number) {
+    return target.set(source, index) || index + source.length;
+}
+
+// Rather than eat the iterator cost, we've inlined this function into the relevant functions
+// function* findVirtual<TList>(index: number, lists: TList[], batch?: number) {
+//     let rows, length;
+//     for (batch = (batch || 0) - 1;
+//         (rows = lists[++batch]) &&
+//         (length = rows.length) <= index &&
+//         0 <= (index -= length);) {}
+//     return rows && -1 < index ? yield [rows, index, batch] : null;
+// }
+
+export type TypedArrayCtor<T extends TypedArray> = {
+    readonly prototype: T;
+    readonly BYTES_PER_ELEMENT: number;
+    new(length: number): T;
+    new(array: ArrayLike<number>): T;
+    new(buffer: ArrayBufferLike, byteOffset?: number, length?: number): T;
+};
+
+export type FloatArray = Float32Array | Float64Array;
+export type IntArray = Int8Array | Int16Array | Int32Array | Uint8ClampedArray | Uint8Array | Uint16Array | Uint32Array;
+
+export type TypedArray = (
+            Int8Array        |
+            Uint8Array       |
+            Int16Array       |
+            Int32Array       |
+            Uint16Array      |
+            Uint32Array      |
+            Float32Array     |
+            Float64Array     |
+            Uint8ClampedArray);
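
A quick sketch of the validity bitmap helpers above: BitVector.pack folds an iterable of truthy/falsy values into a Uint8Array (least-significant bit first within each byte, padded to a multiple of 8 bytes), and a BitVector reads and writes individual bits (import assumes a module under js/src):

    import { BitVector } from './vector/typed';

    const bitmap = BitVector.pack([true, false, true, true]);
    // => Uint8Array [0b00001101, 0, 0, 0, 0, 0, 0, 0]

    const validity = new BitVector(bitmap);
    validity.get(0); // true
    validity.get(1); // false

    validity.set(1, true);
    validity.get(1); // true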

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/vector/vector.ts
----------------------------------------------------------------------
diff --git a/js/src/vector/vector.ts b/js/src/vector/vector.ts
new file mode 100644
index 0000000..1f39f87
--- /dev/null
+++ b/js/src/vector/vector.ts
@@ -0,0 +1,91 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import * as Schema_ from '../format/Schema_generated';
+export import Type = Schema_.org.apache.arrow.flatbuf.Type;
+export import Field = Schema_.org.apache.arrow.flatbuf.Field;
+
+export function sliceToRangeArgs(length: number, start: number, end?: number) {
+    let total = length, from = start || 0;
+    let to = end === end && typeof end == 'number' ? end : total;
+    if (to < 0) { to = total + to; }
+    if (from < 0) { from = total - (from * -1) % total; }
+    if (to < from) { from = to; to = start; }
+    total = !isFinite(total = (to - from)) || total < 0 ? 0 : total;
+    return [from, total];
+}
+
+export class Vector<T> implements Iterable<T> {
+    static defaultName = '';
+    static defaultProps = new Map();
+    static defaultType = Type[Type.NONE];
+    static create<T = any>(field: Field, length: number, ...args: any[]) {
+        let vector = new this<T>(...args), m;
+        vector.length = length;
+        vector.name = field.name();
+        vector.type = Type[field.typeType()];
+        if ((m = field.customMetadataLength()) > 0) {
+            let entry, i = 0, data = vector.props = new Map();
+            do {
+                entry = field.customMetadata(i);
+                data.set(entry.key(), entry.value());
+            } while (++i < m);
+        }
+        return vector;
+    }
+    static from<T = any>(source: Vector<T>, length: number, ...args: any[]) {
+        let vector = new this<T>(...args);
+        vector.length = length;
+        source.name !== Vector.defaultName && (vector.name = source.name);
+        source.type !== Vector.defaultType && (vector.type = source.type);
+        source.props !== Vector.defaultProps && (vector.props = source.props);
+        return vector;
+    }
+    public name: string;
+    public type: string;
+    public length: number;
+    public stride: number;
+    public props: Map<PropertyKey, any>;
+    protected validity: Vector<boolean>;
+    get(index: number): T { return null; }
+    concat(vector: Vector<T>) { return vector; }
+    slice<R = T>(start?: number, end?: number, batch?: number) {
+        const { stride } = this;
+        const [offset, length] = sliceToRangeArgs(
+            stride * this.length, stride * (start || 0), stride * end
+        );
+        return this.range<R>(offset, length, batch);
+    }
+    protected range<R = T>(index: number, length: number, batch?: number) {
+        const result = new Array<R>(length);
+        for (let i = -1, n = this.length; ++i < length;) {
+            result[i] = this.get((i + index) % n) as any;
+        }
+        return result as Iterable<R>;
+    }
+    *[Symbol.iterator]() {
+        for (let i = -1, n = this.length; ++i < n;) {
+            yield this.get(i);
+        }
+    }
+}
+
+Vector.prototype.length = 0;
+Vector.prototype.stride = 1;
+Vector.prototype.name = Vector.defaultName;
+Vector.prototype.type = Vector.defaultType;
+Vector.prototype.props = Vector.defaultProps;
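
For reference, sliceToRangeArgs above normalizes JavaScript-style slice arguments into an [offset, count] pair, accepting negative indices; a few hand-checked cases (import assumes a module under js/src):

    import { sliceToRangeArgs } from './vector/vector';

    sliceToRangeArgs(10, 2, 5);  // [2, 3] -- start at offset 2, take 3 values
    sliceToRangeArgs(10, -3);    // [7, 3] -- a negative start counts back from the end
    sliceToRangeArgs(10, 0, -2); // [0, 8] -- a negative end trims values off the end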

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/Arrow.ts
----------------------------------------------------------------------
diff --git a/js/test/Arrow.ts b/js/test/Arrow.ts
new file mode 100644
index 0000000..a9ab2b7
--- /dev/null
+++ b/js/test/Arrow.ts
@@ -0,0 +1,67 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+/* tslint:disable */
+// Dynamically load an Arrow target build based on command line arguments
+
+const target = process.env.TEST_TARGET;
+const format = process.env.TEST_MODULE;
+const resolve = require('path').resolve;
+
+// these are duplicated in the gulpfile :<
+const targets = [`es5`, `es2015`, `esnext`];
+const formats = [`cjs`, `esm`, `cls`, `umd`];
+
+function throwInvalidImportError(name: string, value: string, values: string[]) {
+    throw new Error('Unrecognized ' + name + ' \'' + value + '\'. Please run tests with \'--' + name + ' <any of ' + values.join(', ') + '>\'');
+}
+
+if (!~targets.indexOf(target)) throwInvalidImportError('target', target, targets);
+if (!~formats.indexOf(format)) throwInvalidImportError('module', format, formats);
+
+let Arrow: any = require(resolve(`./targets/${target}/${format}/Arrow.js`));
+let ArrowInternal: any = require(resolve(`./targets/${target}/${format}/Arrow.internal.js`));
+
+import { vectors as vectors_ } from '../src/Arrow.internal';
+import { Table as Table_, readBuffers as readBuffers_ } from '../src/Arrow';
+
+export let Table = Arrow.Table as typeof Table_;
+export let readBuffers = Arrow.readBuffers as typeof readBuffers_;
+
+export let vectors: typeof vectors_ = ArrowInternal.vectors;
+export namespace vectors {
+    export type Vector<T> = vectors_.Vector<T>;
+    export type BitVector = vectors_.BitVector;
+    export type ListVector<T> = vectors_.ListVector<T>;
+    export type Utf8Vector = vectors_.Utf8Vector;
+    export type DateVector = vectors_.DateVector;
+    export type IndexVector = vectors_.IndexVector;
+    export type Int8Vector = vectors_.Int8Vector;
+    export type Int16Vector = vectors_.Int16Vector;
+    export type Int32Vector = vectors_.Int32Vector;
+    export type Int64Vector = vectors_.Int64Vector;
+    export type Uint8Vector = vectors_.Uint8Vector;
+    export type Uint16Vector = vectors_.Uint16Vector;
+    export type Uint32Vector = vectors_.Uint32Vector;
+    export type Uint64Vector = vectors_.Uint64Vector;
+    export type Float32Vector = vectors_.Float32Vector;
+    export type Float64Vector = vectors_.Float64Vector;
+    export type StructVector = vectors_.StructVector;
+    export type DictionaryVector<T> = vectors_.DictionaryVector<T>;
+    export type FixedSizeListVector<T> = vectors_.FixedSizeListVector<T>;
+};
+

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/test/__snapshots__/reader-tests.ts.snap
----------------------------------------------------------------------
diff --git a/js/test/__snapshots__/reader-tests.ts.snap b/js/test/__snapshots__/reader-tests.ts.snap
new file mode 100644
index 0000000..961ce87
--- /dev/null
+++ b/js/test/__snapshots__/reader-tests.ts.snap
@@ -0,0 +1,497 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"example-csv"`;
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Struct_"`;
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `2`;
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `
+Array [
+  "Hermione",
+  25,
+  Float32Array [
+    -53.235599517822266,
+    40.231998443603516,
+  ],
+]
+`;
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `
+Array [
+  "Severus",
+  30,
+  Float32Array [
+    -62.22999954223633,
+    3,
+  ],
+]
+`;
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 6`] = `"example-csv"`;
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 7`] = `"Struct_"`;
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 8`] = `1`;
+
+exports[`dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 9`] = `
+Array [
+  "Harry",
+  20,
+  Float32Array [
+    23,
+    -100.23652648925781,
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"example-csv"`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Struct_"`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `2`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `
+Array [
+  "Hermione",
+  25,
+  Float32Array [
+    -53.235599517822266,
+    40.231998443603516,
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `
+Array [
+  "Severus",
+  30,
+  Float32Array [
+    -62.22999954223633,
+    3,
+  ],
+]
+`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 6`] = `"example-csv"`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 7`] = `"Struct_"`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 8`] = `1`;
+
+exports[`dictionary stream Arrow readBuffers enumerates each batch as an Array of Vectors 9`] = `
+Array [
+  "Harry",
+  20,
+  Float32Array [
+    23,
+    -100.23652648925781,
+  ],
+]
+`;
+
+exports[`dictionary2 file Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"struct"`;
+
+exports[`dictionary2 file Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Struct_"`;
+
+exports[`dictionary2 file Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `2`;
+
+exports[`dictionary2 file Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `
+Array [
+  "a0fb47f9-f8fb-4403-a64a-786d7611f8ef",
+  "Airbus",
+  1502880750,
+  Float32Array [
+    32.45663833618164,
+    1.8712350130081177,
+  ],
+]
+`;
+
+exports[`dictionary2 file Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `
+Array [
+  "50fb46f4-fefa-42c1-919c-0121974cdd00",
+  "Boeing",
+  1502880750,
+  Float32Array [
+    38.766666412353516,
+    -4.181231498718262,
+  ],
+]
+`;
+
+exports[`multi_dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"struct"`;
+
+exports[`multi_dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Struct_"`;
+
+exports[`multi_dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `2`;
+
+exports[`multi_dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `
+Array [
+  "a0fb47f9-f8fb-4403-a64a-786d7611f8ef",
+  "12345",
+  "Airbus",
+  1502880750,
+  Float32Array [
+    32.45663833618164,
+    1.8712350130081177,
+  ],
+]
+`;
+
+exports[`multi_dictionary file Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `
+Array [
+  "50fb46f4-fefa-42c1-919c-0121974cdd00",
+  "67890",
+  "Boeing",
+  1502880750,
+  Float32Array [
+    38.766666412353516,
+    -4.181231498718262,
+  ],
+]
+`;
+
+exports[`multipart count Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"row_count"`;
+
+exports[`multipart count Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Int"`;
+
+exports[`multipart count Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `1`;
+
+exports[`multipart count Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `10000`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"origin_lat"`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"FloatingPoint"`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `5`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `35.393089294433594`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `35.393089294433594`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 6`] = `35.393089294433594`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 7`] = `29.533695220947266`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 8`] = `29.533695220947266`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 9`] = `"origin_lon"`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 10`] = `"FloatingPoint"`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 11`] = `5`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 12`] = `-97.6007308959961`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 13`] = `-97.6007308959961`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 14`] = `-97.6007308959961`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 15`] = `-98.46977996826172`;
+
+exports[`multipart latlong Arrow readBuffers enumerates each batch as an Array of Vectors 16`] = `-98.46977996826172`;
+
+exports[`multipart origins Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"origin_city"`;
+
+exports[`multipart origins Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Utf8"`;
+
+exports[`multipart origins Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `5`;
+
+exports[`multipart origins Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `"Oklahoma City"`;
+
+exports[`multipart origins Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `"Oklahoma City"`;
+
+exports[`multipart origins Arrow readBuffers enumerates each batch as an Array of Vectors 6`] = `"Oklahoma City"`;
+
+exports[`multipart origins Arrow readBuffers enumerates each batch as an Array of Vectors 7`] = `"San Antonio"`;
+
+exports[`multipart origins Arrow readBuffers enumerates each batch as an Array of Vectors 8`] = `"San Antonio"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"foo"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Int"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `5`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `1`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `null`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 6`] = `3`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 7`] = `4`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 8`] = `5`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 9`] = `"bar"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 10`] = `"FloatingPoint"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 11`] = `5`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 12`] = `1`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 13`] = `null`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 14`] = `null`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 15`] = `4`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 16`] = `5`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 17`] = `"baz"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 18`] = `"Utf8"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 19`] = `5`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 20`] = `"aa"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 21`] = `null`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 22`] = `null`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 23`] = `"bbb"`;
+
+exports[`simple file Arrow readBuffers enumerates each batch as an Array of Vectors 24`] = `"cccc"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"foo"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Int"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `5`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `1`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `null`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 6`] = `3`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 7`] = `4`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 8`] = `5`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 9`] = `"bar"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 10`] = `"FloatingPoint"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 11`] = `5`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 12`] = `1`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 13`] = `null`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 14`] = `null`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 15`] = `4`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 16`] = `5`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 17`] = `"baz"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 18`] = `"Utf8"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 19`] = `5`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 20`] = `"aa"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 21`] = `null`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 22`] = `null`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 23`] = `"bbb"`;
+
+exports[`simple stream Arrow readBuffers enumerates each batch as an Array of Vectors 24`] = `"cccc"`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"struct_nullable"`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Struct_"`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `7`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `null`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `
+Array [
+  null,
+  "MhRNxD4",
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 6`] = `
+Array [
+  137773603,
+  "3F9HBxK",
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 7`] = `
+Array [
+  410361374,
+  "aVd88fp",
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 8`] = `null`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 9`] = `
+Array [
+  null,
+  "3loZrRf",
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 10`] = `null`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 11`] = `"struct_nullable"`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 12`] = `"Struct_"`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 13`] = `10`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 14`] = `null`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 15`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 16`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 17`] = `null`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 18`] = `
+Array [
+  null,
+  "78SLiRw",
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 19`] = `null`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 20`] = `null`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 21`] = `
+Array [
+  null,
+  "0ilsf82",
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 22`] = `
+Array [
+  null,
+  "LjS9MbU",
+]
+`;
+
+exports[`struct file Arrow readBuffers enumerates each batch as an Array of Vectors 23`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 1`] = `"struct_nullable"`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 2`] = `"Struct_"`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 3`] = `7`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 4`] = `null`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 5`] = `
+Array [
+  null,
+  "MhRNxD4",
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 6`] = `
+Array [
+  137773603,
+  "3F9HBxK",
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 7`] = `
+Array [
+  410361374,
+  "aVd88fp",
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 8`] = `null`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 9`] = `
+Array [
+  null,
+  "3loZrRf",
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 10`] = `null`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 11`] = `"struct_nullable"`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 12`] = `"Struct_"`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 13`] = `10`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 14`] = `null`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 15`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 16`] = `
+Array [
+  null,
+  null,
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 17`] = `null`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 18`] = `
+Array [
+  null,
+  "78SLiRw",
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 19`] = `null`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 20`] = `null`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 21`] = `
+Array [
+  null,
+  "0ilsf82",
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 22`] = `
+Array [
+  null,
+  "LjS9MbU",
+]
+`;
+
+exports[`struct stream Arrow readBuffers enumerates each batch as an Array of Vectors 23`] = `
+Array [
+  null,
+  null,
+]
+`;


[3/7] arrow git commit: ARROW-1479: [JS] Expand JavaScript implementation

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/format/Schema_generated.ts
----------------------------------------------------------------------
diff --git a/js/src/format/Schema_generated.ts b/js/src/format/Schema_generated.ts
new file mode 100644
index 0000000..65493b7
--- /dev/null
+++ b/js/src/format/Schema_generated.ts
@@ -0,0 +1,2254 @@
+/* tslint:disable:class-name */
+// automatically generated by the FlatBuffers compiler, do not modify
+
+import { flatbuffers } from 'flatbuffers';
+/**
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum MetadataVersion {
+    V1 = 0,
+    V2 = 1,
+    V3 = 2
+  }
+}
+
+/**
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum UnionMode {
+    Sparse = 0,
+    Dense = 1
+  }
+}
+
+/**
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum Precision {
+    HALF = 0,
+    SINGLE = 1,
+    DOUBLE = 2
+  }
+}
+
+/**
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum DateUnit {
+    DAY = 0,
+    MILLISECOND = 1
+  }
+}
+
+/**
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum TimeUnit {
+    SECOND = 0,
+    MILLISECOND = 1,
+    MICROSECOND = 2,
+    NANOSECOND = 3
+  }
+}
+
+/**
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum IntervalUnit {
+    YEAR_MONTH = 0,
+    DAY_TIME = 1
+  }
+}
+
+/**
+ * ----------------------------------------------------------------------
+ * Top-level Type value, enabling extensible type-specific metadata. We can
+ * add new logical types to Type without breaking backwards compatibility
+ *
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum Type {
+    NONE = 0,
+    Null = 1,
+    Int = 2,
+    FloatingPoint = 3,
+    Binary = 4,
+    Utf8 = 5,
+    Bool = 6,
+    Decimal = 7,
+    Date = 8,
+    Time = 9,
+    Timestamp = 10,
+    Interval = 11,
+    List = 12,
+    Struct_ = 13,
+    Union = 14,
+    FixedSizeBinary = 15,
+    FixedSizeList = 16,
+    Map = 17
+  }
+}
+
+/**
+ * ----------------------------------------------------------------------
+ * The possible types of a vector
+ *
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum VectorType {
+    /**
+     * used in List type, Dense Union and variable length primitive types (String, Binary)
+     */
+    OFFSET = 0,
+
+    /**
+     * actual data, either fixed width primitive types in slots or variable width delimited by an OFFSET vector
+     */
+    DATA = 1,
+
+    /**
+     * Bit vector indicating if each value is null
+     */
+    VALIDITY = 2,
+
+    /**
+     * Type vector used in Union type
+     */
+    TYPE = 3
+  }
+}
+
+/**
+ * ----------------------------------------------------------------------
+ * Endianness of the platform producing the data
+ *
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum Endianness {
+    Little = 0,
+    Big = 1
+  }
+}
+
+/**
+ * These are stored in the flatbuffer in the Type union below
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Null {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Null}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Null {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Null=} obj
+     * @returns {Null}
+     */
+    static getRootAsNull(bb: flatbuffers.ByteBuffer, obj?: Null): Null {
+      return (obj || new Null).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startNull(builder: flatbuffers.Builder) {
+      builder.startObject(0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endNull(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * A Struct_ in the flatbuffer metadata is the same as an Arrow Struct
+ * (according to the physical memory layout). We used Struct_ here as
+ * Struct is a reserved word in Flatbuffers
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Struct_ {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Struct_}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Struct_ {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Struct_=} obj
+     * @returns {Struct_}
+     */
+    static getRootAsStruct_(bb: flatbuffers.ByteBuffer, obj?: Struct_): Struct_ {
+      return (obj || new Struct_).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startStruct_(builder: flatbuffers.Builder) {
+      builder.startObject(0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endStruct_(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class List {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {List}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): List {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {List=} obj
+     * @returns {List}
+     */
+    static getRootAsList(bb: flatbuffers.ByteBuffer, obj?: List): List {
+      return (obj || new List).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startList(builder: flatbuffers.Builder) {
+      builder.startObject(0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endList(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class FixedSizeList {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {FixedSizeList}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): FixedSizeList {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {FixedSizeList=} obj
+     * @returns {FixedSizeList}
+     */
+    static getRootAsFixedSizeList(bb: flatbuffers.ByteBuffer, obj?: FixedSizeList): FixedSizeList {
+      return (obj || new FixedSizeList).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * Number of list items per value
+     *
+     * @returns {number}
+     */
+    listSize(): number {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startFixedSizeList(builder: flatbuffers.Builder) {
+      builder.startObject(1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} listSize
+     */
+    static addListSize(builder: flatbuffers.Builder, listSize: number) {
+      builder.addFieldInt32(0, listSize, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endFixedSizeList(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * A Map is a logical nested type that is represented as
+ *
+ * List<entry: Struct<key: K, value: V>>
+ *
+ * In this layout, the keys and values are each respectively contiguous. We do
+ * not constrain the key and value types, so the application is responsible
+ * for ensuring that the keys are hashable and unique. Whether the keys are sorted
+ * may be set in the metadata for this field
+ *
+ * In a Field with Map type, the Field has a child Struct field, which then
+ * has two children: the first is the key type and the second is the value type.
+ * The names of the child fields may be, respectively, "entry", "key", and
+ * "value", but this is not enforced.
+ *
+ * Map
+ *   - child[0] entry: Struct
+ *     - child[0] key: K
+ *     - child[1] value: V
+ *
+ * Neither the "entry" field nor the "key" field may be nullable.
+ *
+ * The metadata is structured so that Arrow systems without special handling
+ * for Map can make Map an alias for List. The "layout" attribute for the Map
+ * field must have the same contents as a List.
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Map {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Map}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Map {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Map=} obj
+     * @returns {Map}
+     */
+    static getRootAsMap(bb: flatbuffers.ByteBuffer, obj?: Map): Map {
+      return (obj || new Map).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * Set to true if the keys within each value are sorted
+     *
+     * @returns {boolean}
+     */
+    keysSorted(): boolean {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startMap(builder: flatbuffers.Builder) {
+      builder.startObject(1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {boolean} keysSorted
+     */
+    static addKeysSorted(builder: flatbuffers.Builder, keysSorted: boolean) {
+      builder.addFieldInt8(0, +keysSorted, +false);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endMap(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * A union is a complex type with children in Field.
+ * By default, ids in the type vector refer to the offsets in the children;
+ * optionally, typeIds provides an indirection between the child offset and the type id.
+ * For each child, typeIds[offset] is the id used in the type vector.
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Union {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Union}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Union {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Union=} obj
+     * @returns {Union}
+     */
+    static getRootAsUnion(bb: flatbuffers.ByteBuffer, obj?: Union): Union {
+      return (obj || new Union).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.UnionMode}
+     */
+    mode(): org.apache.arrow.flatbuf.UnionMode {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.UnionMode} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.UnionMode.Sparse;
+    }
+
+    /**
+     * @param {number} index
+     * @returns {number}
+     */
+    typeIds(index: number): number | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.readInt32(this.bb.__vector(this.bb_pos + offset) + index * 4) : 0;
+    }
+
+    /**
+     * @returns {number}
+     */
+    typeIdsLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @returns {Int32Array}
+     */
+    typeIdsArray(): Int32Array | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? new Int32Array(this.bb.bytes().buffer, this.bb.bytes().byteOffset + this.bb.__vector(this.bb_pos + offset), this.bb.__vector_len(this.bb_pos + offset)) : null;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startUnion(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.UnionMode} mode
+     */
+    static addMode(builder: flatbuffers.Builder, mode: org.apache.arrow.flatbuf.UnionMode) {
+      builder.addFieldInt16(0, mode, org.apache.arrow.flatbuf.UnionMode.Sparse);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} typeIdsOffset
+     */
+    static addTypeIds(builder: flatbuffers.Builder, typeIdsOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, typeIdsOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {Array.<number>} data
+     * @returns {flatbuffers.Offset}
+     */
+    static createTypeIdsVector(builder: flatbuffers.Builder, data: number[] | Uint8Array): flatbuffers.Offset {
+      builder.startVector(4, data.length, 4);
+      for (let i = data.length - 1; i >= 0; i--) {
+        builder.addInt32(data[i]);
+      }
+      return builder.endVector();
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startTypeIdsVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(4, numElems, 4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endUnion(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
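As a quick illustration of the typeIds indirection described in the comment above, here is a minimal reading sketch (illustrative only), assuming `union` is a Union table obtained elsewhere, e.g. via the Field.type union accessor defined later in this file:

    // typeIdsArray() returns null when the optional typeIds vector is absent;
    // in that case a child's offset in `children` doubles as its type id.
    const ids: Int32Array | null = union.typeIdsArray();
    const typeIdForChild0 = ids !== null ? ids[0] : 0;
    console.log(union.mode(), typeIdForChild0);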
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Int {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Int}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Int {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Int=} obj
+     * @returns {Int}
+     */
+    static getRootAsInt(bb: flatbuffers.ByteBuffer, obj?: Int): Int {
+      return (obj || new Int).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {number}
+     */
+    bitWidth(): number {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @returns {boolean}
+     */
+    isSigned(): boolean {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startInt(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} bitWidth
+     */
+    static addBitWidth(builder: flatbuffers.Builder, bitWidth: number) {
+      builder.addFieldInt32(0, bitWidth, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {boolean} isSigned
+     */
+    static addIsSigned(builder: flatbuffers.Builder, isSigned: boolean) {
+      builder.addFieldInt8(1, +isSigned, +false);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endInt(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
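For a sense of how these generated start/add/end helpers compose with the flatbuffers Builder, here is a minimal round-trip sketch (illustrative only; the import path mirrors the one used by js/src/reader/arrow.ts further down):

    import { flatbuffers } from 'flatbuffers';
    import * as Schema_ from '../format/Schema_generated';
    import Int = Schema_.org.apache.arrow.flatbuf.Int;

    const builder = new flatbuffers.Builder(1024);
    Int.startInt(builder);
    Int.addBitWidth(builder, 32);     // field slot 0, default 0
    Int.addIsSigned(builder, true);   // field slot 1, stored as an int8
    builder.finish(Int.endInt(builder));

    // finish() positions the ByteBuffer at the root, so getRootAsInt can read it back
    const int = Int.getRootAsInt(builder.dataBuffer());
    console.log(int.bitWidth(), int.isSigned()); // 32 true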
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class FloatingPoint {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {FloatingPoint}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): FloatingPoint {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {FloatingPoint=} obj
+     * @returns {FloatingPoint}
+     */
+    static getRootAsFloatingPoint(bb: flatbuffers.ByteBuffer, obj?: FloatingPoint): FloatingPoint {
+      return (obj || new FloatingPoint).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.Precision}
+     */
+    precision(): org.apache.arrow.flatbuf.Precision {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.Precision} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Precision.HALF;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startFloatingPoint(builder: flatbuffers.Builder) {
+      builder.startObject(1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.Precision} precision
+     */
+    static addPrecision(builder: flatbuffers.Builder, precision: org.apache.arrow.flatbuf.Precision) {
+      builder.addFieldInt16(0, precision, org.apache.arrow.flatbuf.Precision.HALF);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endFloatingPoint(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * Unicode with UTF-8 encoding
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Utf8 {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Utf8}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Utf8 {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Utf8=} obj
+     * @returns {Utf8}
+     */
+    static getRootAsUtf8(bb: flatbuffers.ByteBuffer, obj?: Utf8): Utf8 {
+      return (obj || new Utf8).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startUtf8(builder: flatbuffers.Builder) {
+      builder.startObject(0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endUtf8(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Binary {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Binary}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Binary {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Binary=} obj
+     * @returns {Binary}
+     */
+    static getRootAsBinary(bb: flatbuffers.ByteBuffer, obj?: Binary): Binary {
+      return (obj || new Binary).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startBinary(builder: flatbuffers.Builder) {
+      builder.startObject(0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endBinary(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class FixedSizeBinary {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {FixedSizeBinary}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): FixedSizeBinary {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {FixedSizeBinary=} obj
+     * @returns {FixedSizeBinary}
+     */
+    static getRootAsFixedSizeBinary(bb: flatbuffers.ByteBuffer, obj?: FixedSizeBinary): FixedSizeBinary {
+      return (obj || new FixedSizeBinary).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * Number of bytes per value
+     *
+     * @returns {number}
+     */
+    byteWidth(): number {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startFixedSizeBinary(builder: flatbuffers.Builder) {
+      builder.startObject(1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} byteWidth
+     */
+    static addByteWidth(builder: flatbuffers.Builder, byteWidth: number) {
+      builder.addFieldInt32(0, byteWidth, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endFixedSizeBinary(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Bool {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Bool}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Bool {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Bool=} obj
+     * @returns {Bool}
+     */
+    static getRootAsBool(bb: flatbuffers.ByteBuffer, obj?: Bool): Bool {
+      return (obj || new Bool).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startBool(builder: flatbuffers.Builder) {
+      builder.startObject(0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endBool(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Decimal {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Decimal}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Decimal {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Decimal=} obj
+     * @returns {Decimal}
+     */
+    static getRootAsDecimal(bb: flatbuffers.ByteBuffer, obj?: Decimal): Decimal {
+      return (obj || new Decimal).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * Total number of decimal digits
+     *
+     * @returns {number}
+     */
+    precision(): number {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * Number of digits after the decimal point "."
+     *
+     * @returns {number}
+     */
+    scale(): number {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startDecimal(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} precision
+     */
+    static addPrecision(builder: flatbuffers.Builder, precision: number) {
+      builder.addFieldInt32(0, precision, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} scale
+     */
+    static addScale(builder: flatbuffers.Builder, scale: number) {
+      builder.addFieldInt32(1, scale, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endDecimal(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * Date is either a 32-bit or 64-bit type representing elapsed time since UNIX
+ * epoch (1970-01-01), stored in either of two units:
+ *
+ * * Milliseconds (64 bits) indicating UNIX time elapsed since the epoch (no
+ *   leap seconds), where the values are evenly divisible by 86400000
+ * * Days (32 bits) since the UNIX epoch
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Date {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Date}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Date {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Date=} obj
+     * @returns {Date}
+     */
+    static getRootAsDate(bb: flatbuffers.ByteBuffer, obj?: Date): Date {
+      return (obj || new Date).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.DateUnit}
+     */
+    unit(): org.apache.arrow.flatbuf.DateUnit {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.DateUnit} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.DateUnit.MILLISECOND;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startDate(builder: flatbuffers.Builder) {
+      builder.startObject(1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.DateUnit} unit
+     */
+    static addUnit(builder: flatbuffers.Builder, unit: org.apache.arrow.flatbuf.DateUnit) {
+      builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.DateUnit.MILLISECOND);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endDate(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * Time type. The physical storage type depends on the unit
+ * - SECOND and MILLISECOND: 32 bits
+ * - MICROSECOND and NANOSECOND: 64 bits
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Time {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Time}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Time {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Time=} obj
+     * @returns {Time}
+     */
+    static getRootAsTime(bb: flatbuffers.ByteBuffer, obj?: Time): Time {
+      return (obj || new Time).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.TimeUnit}
+     */
+    unit(): org.apache.arrow.flatbuf.TimeUnit {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.TimeUnit} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.TimeUnit.MILLISECOND;
+    }
+
+    /**
+     * @returns {number}
+     */
+    bitWidth(): number {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.readInt32(this.bb_pos + offset) : 32;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startTime(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.TimeUnit} unit
+     */
+    static addUnit(builder: flatbuffers.Builder, unit: org.apache.arrow.flatbuf.TimeUnit) {
+      builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.TimeUnit.MILLISECOND);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} bitWidth
+     */
+    static addBitWidth(builder: flatbuffers.Builder, bitWidth: number) {
+      builder.addFieldInt32(1, bitWidth, 32);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endTime(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * Time elapsed from the Unix epoch, 00:00:00.000 on 1 January 1970, as a
+ * 64-bit integer. Note that UNIX time does not include leap seconds.
+ *
+ * The Timestamp metadata supports both "time zone naive" and "time zone
+ * aware" timestamps. Read about the timezone attribute for more detail
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Timestamp {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Timestamp}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Timestamp {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Timestamp=} obj
+     * @returns {Timestamp}
+     */
+    static getRootAsTimestamp(bb: flatbuffers.ByteBuffer, obj?: Timestamp): Timestamp {
+      return (obj || new Timestamp).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.TimeUnit}
+     */
+    unit(): org.apache.arrow.flatbuf.TimeUnit {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.TimeUnit} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.TimeUnit.SECOND;
+    }
+
+    /**
+     * The time zone is a string indicating the name of a time zone, one of:
+     *
+     * * As used in the Olson time zone database (the "tz database" or
+     *   "tzdata"), such as "America/New_York"
+     * * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30
+     *
+     * Whether a timezone string is present indicates different semantics about
+     * the data:
+     *
+     * * If the time zone is null or equal to an empty string, the data is "time
+     *   zone naive" and shall be displayed *as is* to the user, not localized
+     *   to the locale of the user. This data can be thought of as UTC but
+     *   without having "UTC" as the time zone, it is not considered to be
+     *   localized to any time zone
+     *
+     * * If the time zone is set to a valid value, values can be displayed as
+     *   "localized" to that time zone, even though the underlying 64-bit
+     *   integers are identical to the same data stored in UTC. Converting
+     *   between time zones is a metadata-only operation and does not change the
+     *   underlying values
+     *
+     * @param {flatbuffers.Encoding=} optionalEncoding
+     * @returns {string|Uint8Array|null}
+     */
+    timezone(): string | null;
+    timezone(optionalEncoding: flatbuffers.Encoding): string | Uint8Array | null;
+    timezone(optionalEncoding?: any): string | Uint8Array | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startTimestamp(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.TimeUnit} unit
+     */
+    static addUnit(builder: flatbuffers.Builder, unit: org.apache.arrow.flatbuf.TimeUnit) {
+      builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.TimeUnit.SECOND);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} timezoneOffset
+     */
+    static addTimezone(builder: flatbuffers.Builder, timezoneOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, timezoneOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endTimestamp(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
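The unit/timezone accessors above are normally reached through the union-typed `type` slot on Field (defined further down). A rough sketch of that resolution (imports as in the Int sketch above), assuming `field` came from `schema.fields(i)` and that Type.Timestamp is the matching member of the Type enum generated earlier in this file:

    import Timestamp = Schema_.org.apache.arrow.flatbuf.Timestamp;
    import Type = Schema_.org.apache.arrow.flatbuf.Type;

    if (field.typeType() === Type.Timestamp) {
        // Field.type(obj) resolves the union slot into the table instance you pass in
        const ts = field.type(new Timestamp());
        if (ts) { console.log(ts.unit(), ts.timezone()); }
    }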
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Interval {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Interval}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Interval {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Interval=} obj
+     * @returns {Interval}
+     */
+    static getRootAsInterval(bb: flatbuffers.ByteBuffer, obj?: Interval): Interval {
+      return (obj || new Interval).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.IntervalUnit}
+     */
+    unit(): org.apache.arrow.flatbuf.IntervalUnit {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.IntervalUnit} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.IntervalUnit.YEAR_MONTH;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startInterval(builder: flatbuffers.Builder) {
+      builder.startObject(1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.IntervalUnit} unit
+     */
+    static addUnit(builder: flatbuffers.Builder, unit: org.apache.arrow.flatbuf.IntervalUnit) {
+      builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.IntervalUnit.YEAR_MONTH);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endInterval(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * ----------------------------------------------------------------------
+ * Represents the physical layout of a buffer.
+ * Buffers have fixed-width slots of a given type.
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class VectorLayout {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {VectorLayout}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): VectorLayout {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {VectorLayout=} obj
+     * @returns {VectorLayout}
+     */
+    static getRootAsVectorLayout(bb: flatbuffers.ByteBuffer, obj?: VectorLayout): VectorLayout {
+      return (obj || new VectorLayout).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * the width of a slot in the buffer (typically 1, 8, 16, 32 or 64)
+     *
+     * @returns {number}
+     */
+    bitWidth(): number {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt16(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * the purpose of the vector
+     *
+     * @returns {org.apache.arrow.flatbuf.VectorType}
+     */
+    type(): org.apache.arrow.flatbuf.VectorType {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? /** @type {org.apache.arrow.flatbuf.VectorType} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.VectorType.OFFSET;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startVectorLayout(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} bitWidth
+     */
+    static addBitWidth(builder: flatbuffers.Builder, bitWidth: number) {
+      builder.addFieldInt16(0, bitWidth, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.VectorType} type
+     */
+    static addType(builder: flatbuffers.Builder, type: org.apache.arrow.flatbuf.VectorType) {
+      builder.addFieldInt16(1, type, org.apache.arrow.flatbuf.VectorType.OFFSET);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endVectorLayout(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * ----------------------------------------------------------------------
+ * User-defined key/value pairs to add custom metadata to Arrow.
+ * Key namespacing is the responsibility of the user.
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class KeyValue {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {KeyValue}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): KeyValue {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {KeyValue=} obj
+     * @returns {KeyValue}
+     */
+    static getRootAsKeyValue(bb: flatbuffers.ByteBuffer, obj?: KeyValue): KeyValue {
+      return (obj || new KeyValue).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @param {flatbuffers.Encoding=} optionalEncoding
+     * @returns {string|Uint8Array|null}
+     */
+    key(): string | null;
+    key(optionalEncoding: flatbuffers.Encoding): string | Uint8Array | null;
+    key(optionalEncoding?: any): string | Uint8Array | null {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
+    }
+
+    /**
+     * @param {flatbuffers.Encoding=} optionalEncoding
+     * @returns {string|Uint8Array|null}
+     */
+    value(): string | null;
+    value(optionalEncoding: flatbuffers.Encoding): string | Uint8Array | null;
+    value(optionalEncoding?: any): string | Uint8Array | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startKeyValue(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} keyOffset
+     */
+    static addKey(builder: flatbuffers.Builder, keyOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(0, keyOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} valueOffset
+     */
+    static addValue(builder: flatbuffers.Builder, valueOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, valueOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endKeyValue(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * ----------------------------------------------------------------------
+ * Dictionary encoding metadata
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class DictionaryEncoding {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {DictionaryEncoding}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): DictionaryEncoding {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {DictionaryEncoding=} obj
+     * @returns {DictionaryEncoding}
+     */
+    static getRootAsDictionaryEncoding(bb: flatbuffers.ByteBuffer, obj?: DictionaryEncoding): DictionaryEncoding {
+      return (obj || new DictionaryEncoding).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * The known dictionary id in the application where this data is used. In
+     * the file or streaming formats, the dictionary ids are found in the
+     * DictionaryBatch messages
+     *
+     * @returns {flatbuffers.Long}
+     */
+    id(): flatbuffers.Long {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+    }
+
+    /**
+     * The dictionary indices are constrained to be positive integers. If this
+     * field is null, the indices must be signed int32
+     *
+     * @param {org.apache.arrow.flatbuf.Int=} obj
+     * @returns {org.apache.arrow.flatbuf.Int|null}
+     */
+    indexType(obj?: org.apache.arrow.flatbuf.Int): org.apache.arrow.flatbuf.Int | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? (obj || new org.apache.arrow.flatbuf.Int).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+    }
+
+    /**
+     * By default, dictionaries are not ordered, or the order does not have
+     * semantic meaning. In some statistical applications, dictionary encoding
+     * is used to represent ordered categorical data, and we provide a way to
+     * preserve that metadata here
+     *
+     * @returns {boolean}
+     */
+    isOrdered(): boolean {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startDictionaryEncoding(builder: flatbuffers.Builder) {
+      builder.startObject(3);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} id
+     */
+    static addId(builder: flatbuffers.Builder, id: flatbuffers.Long) {
+      builder.addFieldInt64(0, id, builder.createLong(0, 0));
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} indexTypeOffset
+     */
+    static addIndexType(builder: flatbuffers.Builder, indexTypeOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, indexTypeOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {boolean} isOrdered
+     */
+    static addIsOrdered(builder: flatbuffers.Builder, isOrdered: boolean) {
+      builder.addFieldInt8(2, +isOrdered, +false);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endDictionaryEncoding(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
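In practice a reader checks this table through Field.dictionary() (defined below) to decide whether a column's data is indices into a dictionary. A small sketch (illustrative only), again assuming `field` came from `schema.fields(i)`:

    const encoding = field.dictionary();
    if (encoding) {
        const indexType = encoding.indexType();
        console.log(
            encoding.id().toFloat64(),              // dictionary id as a JS number
            indexType ? indexType.bitWidth() : 32,  // falls back to signed int32 per the comment above
            encoding.isOrdered());
    }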
+/**
+ * ----------------------------------------------------------------------
+ * A field represents a named column in a record / row batch or child of a
+ * nested type.
+ *
+ * - children is only for nested Arrow arrays
+ * - For primitive types, children will have length 0
+ * - nullable should default to true in general
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Field {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Field}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Field {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Field=} obj
+     * @returns {Field}
+     */
+    static getRootAsField(bb: flatbuffers.ByteBuffer, obj?: Field): Field {
+      return (obj || new Field).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @param {flatbuffers.Encoding=} optionalEncoding
+     * @returns {string|Uint8Array|null}
+     */
+    name(): string | null;
+    name(optionalEncoding: flatbuffers.Encoding): string | Uint8Array | null;
+    name(optionalEncoding?: any): string | Uint8Array | null {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
+    }
+
+    /**
+     * @returns {boolean}
+     */
+    nullable(): boolean {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.Type}
+     */
+    typeType(): org.apache.arrow.flatbuf.Type {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? /** @type {org.apache.arrow.flatbuf.Type} */ (this.bb.readUint8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Type.NONE;
+    }
+
+    /**
+     * @param {flatbuffers.Table} obj
+     * @returns {?flatbuffers.Table}
+     */
+    type<T extends flatbuffers.Table>(obj: T): T | null {
+      let offset = this.bb.__offset(this.bb_pos, 10);
+      return offset ? this.bb.__union(obj, this.bb_pos + offset) : null;
+    }
+
+    /**
+     * @param {org.apache.arrow.flatbuf.DictionaryEncoding=} obj
+     * @returns {org.apache.arrow.flatbuf.DictionaryEncoding|null}
+     */
+    dictionary(obj?: org.apache.arrow.flatbuf.DictionaryEncoding): org.apache.arrow.flatbuf.DictionaryEncoding | null {
+      let offset = this.bb.__offset(this.bb_pos, 12);
+      return offset ? (obj || new org.apache.arrow.flatbuf.DictionaryEncoding).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+    }
+
+    /**
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.Field=} obj
+     * @returns {org.apache.arrow.flatbuf.Field}
+     */
+    children(index: number, obj?: org.apache.arrow.flatbuf.Field): org.apache.arrow.flatbuf.Field | null {
+      let offset = this.bb.__offset(this.bb_pos, 14);
+      return offset ? (obj || new org.apache.arrow.flatbuf.Field).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    childrenLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 14);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * Layout of buffers produced for this type (as derived from the Type);
+     * does not include children.
+     * Each RecordBatch will return instances of those Buffers.
+     *
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.VectorLayout=} obj
+     * @returns {org.apache.arrow.flatbuf.VectorLayout}
+     */
+    layout(index: number, obj?: org.apache.arrow.flatbuf.VectorLayout): org.apache.arrow.flatbuf.VectorLayout | null {
+      let offset = this.bb.__offset(this.bb_pos, 16);
+      return offset ? (obj || new org.apache.arrow.flatbuf.VectorLayout).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    layoutLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 16);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.KeyValue=} obj
+     * @returns {org.apache.arrow.flatbuf.KeyValue}
+     */
+    customMetadata(index: number, obj?: org.apache.arrow.flatbuf.KeyValue): org.apache.arrow.flatbuf.KeyValue | null {
+      let offset = this.bb.__offset(this.bb_pos, 18);
+      return offset ? (obj || new org.apache.arrow.flatbuf.KeyValue).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    customMetadataLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 18);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startField(builder: flatbuffers.Builder) {
+      builder.startObject(8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} nameOffset
+     */
+    static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(0, nameOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {boolean} nullable
+     */
+    static addNullable(builder: flatbuffers.Builder, nullable: boolean) {
+      builder.addFieldInt8(1, +nullable, +false);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.Type} typeType
+     */
+    static addTypeType(builder: flatbuffers.Builder, typeType: org.apache.arrow.flatbuf.Type) {
+      builder.addFieldInt8(2, typeType, org.apache.arrow.flatbuf.Type.NONE);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} typeOffset
+     */
+    static addType(builder: flatbuffers.Builder, typeOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(3, typeOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} dictionaryOffset
+     */
+    static addDictionary(builder: flatbuffers.Builder, dictionaryOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(4, dictionaryOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} childrenOffset
+     */
+    static addChildren(builder: flatbuffers.Builder, childrenOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(5, childrenOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {Array.<flatbuffers.Offset>} data
+     * @returns {flatbuffers.Offset}
+     */
+    static createChildrenVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {
+      builder.startVector(4, data.length, 4);
+      for (let i = data.length - 1; i >= 0; i--) {
+        builder.addOffset(data[i]);
+      }
+      return builder.endVector();
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startChildrenVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(4, numElems, 4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} layoutOffset
+     */
+    static addLayout(builder: flatbuffers.Builder, layoutOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(6, layoutOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {Array.<flatbuffers.Offset>} data
+     * @returns {flatbuffers.Offset}
+     */
+    static createLayoutVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {
+      builder.startVector(4, data.length, 4);
+      for (let i = data.length - 1; i >= 0; i--) {
+        builder.addOffset(data[i]);
+      }
+      return builder.endVector();
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startLayoutVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(4, numElems, 4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} customMetadataOffset
+     */
+    static addCustomMetadata(builder: flatbuffers.Builder, customMetadataOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(7, customMetadataOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {Array.<flatbuffers.Offset>} data
+     * @returns {flatbuffers.Offset}
+     */
+    static createCustomMetadataVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {
+      builder.startVector(4, data.length, 4);
+      for (let i = data.length - 1; i >= 0; i--) {
+        builder.addOffset(data[i]);
+      }
+      return builder.endVector();
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startCustomMetadataVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(4, numElems, 4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endField(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
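Children and layout are both stored as vectors of tables, so iteration follows the usual length()/accessor(index) pattern. A sketch (illustrative only), assuming `field` came from `schema.fields(i)`:

    for (let i = -1, n = field.childrenLength(); ++i < n;) {
        const child = field.children(i);
        if (child) { console.log(child.name(), child.nullable(), child.typeType()); }
    }
    for (let i = -1, n = field.layoutLength(); ++i < n;) {
        const layout = field.layout(i);
        if (layout) { console.log(layout.type(), layout.bitWidth()); }
    }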
+/**
+ * ----------------------------------------------------------------------
+ * A Buffer represents a single contiguous memory segment
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Buffer {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Buffer}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Buffer {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * The shared memory page id where this buffer is located. Currently this is
+     * not used
+     *
+     * @returns {number}
+     */
+    page(): number {
+      return this.bb.readInt32(this.bb_pos);
+    }
+
+    /**
+     * The relative offset into the shared memory page where the bytes for this
+     * buffer starts
+     *
+     * @returns {flatbuffers.Long}
+     */
+    offset(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos + 8);
+    }
+
+    /**
+     * The absolute length (in bytes) of the memory buffer. The memory is found
+     * from offset (inclusive) to offset + length (non-inclusive).
+     *
+     * @returns {flatbuffers.Long}
+     */
+    length(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos + 16);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} page
+     * @param {flatbuffers.Long} offset
+     * @param {flatbuffers.Long} length
+     * @returns {flatbuffers.Offset}
+     */
+    static createBuffer(builder: flatbuffers.Builder, page: number, offset: flatbuffers.Long, length: flatbuffers.Long): flatbuffers.Offset {
+      builder.prep(8, 24);
+      builder.writeInt64(length);
+      builder.writeInt64(offset);
+      builder.pad(4);
+      builder.writeInt32(page);
+      return builder.offset();
+    }
+
+  }
+}
+/**
+ * ----------------------------------------------------------------------
+ * A Schema describes the columns in a row batch
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Schema {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Schema}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Schema {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Schema=} obj
+     * @returns {Schema}
+     */
+    static getRootAsSchema(bb: flatbuffers.ByteBuffer, obj?: Schema): Schema {
+      return (obj || new Schema).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * Endianness of the buffer. It is Little Endian by default;
+     * if the endianness doesn't match the underlying system, the vectors need to be converted.
+     *
+     * @returns {org.apache.arrow.flatbuf.Endianness}
+     */
+    endianness(): org.apache.arrow.flatbuf.Endianness {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.Endianness} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Endianness.Little;
+    }
+
+    /**
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.Field=} obj
+     * @returns {org.apache.arrow.flatbuf.Field}
+     */
+    fields(index: number, obj?: org.apache.arrow.flatbuf.Field): org.apache.arrow.flatbuf.Field | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? (obj || new org.apache.arrow.flatbuf.Field).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    fieldsLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.KeyValue=} obj
+     * @returns {org.apache.arrow.flatbuf.KeyValue}
+     */
+    customMetadata(index: number, obj?: org.apache.arrow.flatbuf.KeyValue): org.apache.arrow.flatbuf.KeyValue | null {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? (obj || new org.apache.arrow.flatbuf.KeyValue).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    customMetadataLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startSchema(builder: flatbuffers.Builder) {
+      builder.startObject(3);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.Endianness} endianness
+     */
+    static addEndianness(builder: flatbuffers.Builder, endianness: org.apache.arrow.flatbuf.Endianness) {
+      builder.addFieldInt16(0, endianness, org.apache.arrow.flatbuf.Endianness.Little);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} fieldsOffset
+     */
+    static addFields(builder: flatbuffers.Builder, fieldsOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, fieldsOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {Array.<flatbuffers.Offset>} data
+     * @returns {flatbuffers.Offset}
+     */
+    static createFieldsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {
+      builder.startVector(4, data.length, 4);
+      for (let i = data.length - 1; i >= 0; i--) {
+        builder.addOffset(data[i]);
+      }
+      return builder.endVector();
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startFieldsVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(4, numElems, 4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} customMetadataOffset
+     */
+    static addCustomMetadata(builder: flatbuffers.Builder, customMetadataOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(2, customMetadataOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {Array.<flatbuffers.Offset>} data
+     * @returns {flatbuffers.Offset}
+     */
+    static createCustomMetadataVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {
+      builder.startVector(4, data.length, 4);
+      for (let i = data.length - 1; i >= 0; i--) {
+        builder.addOffset(data[i]);
+      }
+      return builder.endVector();
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startCustomMetadataVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(4, numElems, 4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endSchema(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} offset
+     */
+    static finishSchemaBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
+      builder.finish(offset);
+    }
+
+  }
+}
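
For reference, a minimal sketch of how the generated builder helpers above could be used to serialize a Schema with no fields and no custom metadata. The flatbuffers Builder and its asUint8Array() call come from the flatbuffers npm package, and the import path is an assumption for illustration, not part of this patch:

    import { flatbuffers } from 'flatbuffers';
    import * as Schema_ from './format/Schema_generated';
    import Schema = Schema_.org.apache.arrow.flatbuf.Schema;
    import Endianness = Schema_.org.apache.arrow.flatbuf.Endianness;

    const builder = new flatbuffers.Builder();
    // vectors must be written before startSchema() opens the table
    const fields = Schema.createFieldsVector(builder, []);
    Schema.startSchema(builder);
    Schema.addEndianness(builder, Endianness.Little);
    Schema.addFields(builder, fields);
    Schema.finishSchemaBuffer(builder, Schema.endSchema(builder));
    const bytes = builder.asUint8Array(); // serialized Schema flatbuffer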

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/reader/arrow.ts
----------------------------------------------------------------------
diff --git a/js/src/reader/arrow.ts b/js/src/reader/arrow.ts
new file mode 100644
index 0000000..9716c7f
--- /dev/null
+++ b/js/src/reader/arrow.ts
@@ -0,0 +1,78 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { flatbuffers } from 'flatbuffers';
+import * as Schema_ from '../format/Schema_generated';
+import * as Message_ from '../format/Message_generated';
+
+import { readFile } from './file';
+import { readStream } from './stream';
+import { readVector } from './vector';
+import { Vector } from '../vector/vector';
+import { readDictionaries } from './dictionary';
+
+import ByteBuffer = flatbuffers.ByteBuffer;
+export import Schema = Schema_.org.apache.arrow.flatbuf.Schema;
+export import RecordBatch = Message_.org.apache.arrow.flatbuf.RecordBatch;
+export type Dictionaries = { [k: string]: Vector<any> };
+export type IteratorState = { nodeIndex: number; bufferIndex: number };
+
+export function* readRecords(...bytes: ByteBuffer[]) {
+    try {
+        yield* readFile(...bytes);
+    } catch (e) {
+        try {
+            yield* readStream(...bytes);
+        } catch (e) {
+            throw new Error('Invalid Arrow buffer');
+        }
+    }
+}
+
+export function* readBuffers(...bytes: Array<Uint8Array | Buffer | string>) {
+    const dictionaries: Dictionaries = {};
+    const byteBuffers = bytes.map(toByteBuffer);
+    for (let { schema, batch } of readRecords(...byteBuffers)) {
+        let vectors: Vector<any>[] = [];
+        let state = { nodeIndex: 0, bufferIndex: 0 };
+        let index = -1, fieldsLength = schema.fieldsLength();
+        if (batch.id) {
+            while (++index < fieldsLength) {
+                for (let [id, vector] of readDictionaries(schema.fields(index), batch, state, dictionaries)) {
+                    dictionaries[id] = dictionaries[id] && dictionaries[id].concat(vector) || vector;
+                }
+            }
+        } else {
+            while (++index < fieldsLength) {
+                vectors[index] = readVector(schema.fields(index), batch, state, dictionaries);
+            }
+            yield vectors;
+        }
+    }
+}
+
+function toByteBuffer(bytes?: Uint8Array | Buffer | string) {
+    let arr: Uint8Array = bytes as any || new Uint8Array(0);
+    if (typeof bytes === 'string') {
+        arr = new Uint8Array(bytes.length);
+        for (let i = -1, n = bytes.length; ++i < n;) {
+            arr[i] = bytes.charCodeAt(i);
+        }
+        return new ByteBuffer(arr);
+    }
+    return new ByteBuffer(arr);
+}
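
A minimal usage sketch for the reader above, assuming `bytes` holds a complete Arrow file or stream (for example, the contents of an .arrow file read from disk); the import path is relative to js/src:

    import { readBuffers } from './reader/arrow';

    declare const bytes: Uint8Array;

    for (const columns of readBuffers(bytes)) {
        // one iteration per record batch; `columns` holds one Vector per schema field
        console.log(columns.length);
    }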

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/reader/dictionary.ts
----------------------------------------------------------------------
diff --git a/js/src/reader/dictionary.ts b/js/src/reader/dictionary.ts
new file mode 100644
index 0000000..aef2bc9
--- /dev/null
+++ b/js/src/reader/dictionary.ts
@@ -0,0 +1,43 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { readVector } from './vector';
+import { MessageBatch } from './message';
+import * as Schema_ from '../format/Schema_generated';
+import { IteratorState, Dictionaries } from './arrow';
+
+import Field = Schema_.org.apache.arrow.flatbuf.Field;
+import DictionaryEncoding = Schema_.org.apache.arrow.flatbuf.DictionaryEncoding;
+
+export function* readDictionaries(field: Field,
+                                  batch: MessageBatch,
+                                  iterator: IteratorState,
+                                  dictionaries: Dictionaries) {
+    let id: string, encoding: DictionaryEncoding;
+    if ((encoding = field.dictionary()) &&
+        batch.id === (id = encoding.id().toFloat64().toString())) {
+        yield [id, readVector(field, batch, iterator, null)];
+        return;
+    }
+    for (let i = -1, n = field.childrenLength(); ++i < n;) {
+        // Since a dictionary batch can only contain a single vector, return early after we find it
+        for (let result of readDictionaries(field.children(i), batch, iterator, dictionaries)) {
+            yield result;
+            return;
+        }
+    }
+}
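
The id comparison at the top of readDictionaries is the key step: a Field only participates in a DictionaryBatch when its DictionaryEncoding id matches the batch id that readMessageBatches stringified from the message header. A hedged sketch of that predicate in isolation (the helper name is hypothetical):

    import * as Schema_ from '../format/Schema_generated';
    import Field = Schema_.org.apache.arrow.flatbuf.Field;

    function matchesDictionaryBatch(field: Field, batchId: string): boolean {
        const encoding = field.dictionary();
        // flatbuffers longs are stringified via toFloat64(), as above
        return encoding != null && encoding.id().toFloat64().toString() === batchId;
    }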

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/reader/file.ts
----------------------------------------------------------------------
diff --git a/js/src/reader/file.ts b/js/src/reader/file.ts
new file mode 100644
index 0000000..b05b99a
--- /dev/null
+++ b/js/src/reader/file.ts
@@ -0,0 +1,79 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { flatbuffers } from 'flatbuffers';
+import * as File_ from '../format/File_generated';
+import * as Schema_ from '../format/Schema_generated';
+import * as Message_ from '../format/Message_generated';
+import { PADDING, readMessageBatches } from './message';
+
+import ByteBuffer = flatbuffers.ByteBuffer;
+import Footer = File_.org.apache.arrow.flatbuf.Footer;
+export import Schema = Schema_.org.apache.arrow.flatbuf.Schema;
+export import RecordBatch = Message_.org.apache.arrow.flatbuf.RecordBatch;
+
+const MAGIC_STR = 'ARROW1';
+const MAGIC = new Uint8Array(MAGIC_STR.length);
+for (let i = 0; i < MAGIC_STR.length; i += 1 | 0) {
+    MAGIC[i] = MAGIC_STR.charCodeAt(i);
+}
+
+export function _checkMagic(buffer: Uint8Array, index = 0) {
+    for (let i = -1, n = MAGIC.length; ++i < n;) {
+        if (MAGIC[i] !== buffer[index + i]) {
+            return false;
+        }
+    }
+    return true;
+}
+
+const magicLength = MAGIC.length;
+const magicAndPadding = magicLength + PADDING;
+const magicX2AndPadding = magicLength * 2 + PADDING;
+
+export function* readFile(...bbs: ByteBuffer[]) {
+    for (let bb of bbs) {
+        let fileLength = bb.capacity();
+        let footerLength: number, footerOffset: number;
+        if ((fileLength < magicX2AndPadding /*                     Arrow buffer too small */) ||
+            (!_checkMagic(bb.bytes(), 0) /*                        Missing magic start    */) ||
+            (!_checkMagic(bb.bytes(), fileLength - magicLength) /* Missing magic end      */) ||
+            (/*                                                    Invalid footer length  */
+            (footerLength = bb.readInt32(footerOffset = fileLength - magicAndPadding)) < 1 ||
+            (footerLength + magicX2AndPadding > fileLength))) {
+            throw new Error('Invalid file');
+        }
+        bb.setPosition(footerOffset - footerLength);
+        let footer = Footer.getRootAsFooter(bb), schema = footer.schema();
+        for (let i = -1, n = footer.dictionariesLength(); ++i < n;) {
+            let block = footer.dictionaries(i);
+            bb.setPosition(block.offset().low);
+            for (let batch of readMessageBatches(bb)) {
+                yield { schema, batch };
+                break;
+            }
+        }
+        for (let i = -1, n = footer.recordBatchesLength(); ++i < n;) {
+            const block = footer.recordBatches(i);
+            bb.setPosition(block.offset().low);
+            for (let batch of readMessageBatches(bb)) {
+                yield { schema, batch };
+                break;
+            }
+        }
+    }
+}
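
readFile depends on the Arrow file layout: the ARROW1 magic at both ends, a 4-byte footer length immediately before the trailing magic, and the Footer flatbuffer immediately before that. A small sketch of just the footer arithmetic, mirroring the checks above (the helper name is hypothetical):

    import { flatbuffers } from 'flatbuffers';

    const PADDING = 4;
    const MAGIC_LENGTH = 'ARROW1'.length;

    // returns the byte offset at which Footer.getRootAsFooter starts reading
    function footerStart(bb: flatbuffers.ByteBuffer): number {
        const fileLength = bb.capacity();
        const footerLengthOffset = fileLength - MAGIC_LENGTH - PADDING;
        const footerLength = bb.readInt32(footerLengthOffset);
        return footerLengthOffset - footerLength;
    }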

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/reader/message.ts
----------------------------------------------------------------------
diff --git a/js/src/reader/message.ts b/js/src/reader/message.ts
new file mode 100644
index 0000000..5472f10
--- /dev/null
+++ b/js/src/reader/message.ts
@@ -0,0 +1,63 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { flatbuffers } from 'flatbuffers';
+import * as Message_ from '../format/Message_generated';
+import ByteBuffer = flatbuffers.ByteBuffer;
+import Message = Message_.org.apache.arrow.flatbuf.Message;
+import MessageHeader = Message_.org.apache.arrow.flatbuf.MessageHeader;
+import RecordBatch = Message_.org.apache.arrow.flatbuf.RecordBatch;
+import DictionaryBatch = Message_.org.apache.arrow.flatbuf.DictionaryBatch;
+
+export const PADDING = 4;
+export type MessageBatch = {
+    id?: string;
+    offset: number;
+    bytes: Uint8Array;
+    data: RecordBatch;
+};
+
+export function* readMessages(bb: ByteBuffer) {
+    let message, length;
+    while (bb.position() < bb.capacity() &&
+          (length = bb.readInt32(bb.position())) > 0) {
+        bb.setPosition(bb.position() + PADDING);
+        message = Message.getRootAsMessage(bb);
+        bb.setPosition(bb.position() + length);
+        yield message;
+    }
+}
+
+export function* readMessageBatches(bb: ByteBuffer) {
+    let bytes = bb.bytes();
+    for (let message of readMessages(bb)) {
+        let type = message.headerType();
+        let id: string, data: RecordBatch;
+        if (type === MessageHeader.RecordBatch) {
+            data = message.header(new RecordBatch());
+        } else if (type === MessageHeader.DictionaryBatch) {
+            let header = message.header(new DictionaryBatch());
+            id = header.id().toFloat64().toString();
+            data = header.data();
+        } else {
+            continue;
+        }
+        yield <MessageBatch> { id, data, bytes, offset: bytes.byteOffset + bb.position() };
+        // position the buffer after the body to read the next message
+        bb.setPosition(bb.position() + message.bodyLength().low);
+    }
+}
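
Each message is framed as a 4-byte metadata length, the Message flatbuffer, and then bodyLength bytes of record data; readMessageBatches leaves the buffer positioned at the body and reports that position in `offset`. A hedged consumption sketch, assuming `bb` is already positioned at the first message (as readFile and readStream arrange) and that the import path is a sibling module:

    import { flatbuffers } from 'flatbuffers';
    import { readMessageBatches } from './message';

    declare const bb: flatbuffers.ByteBuffer;

    for (const batch of readMessageBatches(bb)) {
        if (batch.id) {
            // DictionaryBatch: `batch.data` is the dictionary's RecordBatch, keyed by `batch.id`
        } else {
            // RecordBatch
        }
        // `batch.offset` marks where the message body starts
    }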

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/reader/stream.ts
----------------------------------------------------------------------
diff --git a/js/src/reader/stream.ts b/js/src/reader/stream.ts
new file mode 100644
index 0000000..9869f63
--- /dev/null
+++ b/js/src/reader/stream.ts
@@ -0,0 +1,43 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { _checkMagic } from './file';
+import { flatbuffers } from 'flatbuffers';
+import * as Schema_ from '../format/Schema_generated';
+import * as Message_ from '../format/Message_generated';
+import { readMessages, readMessageBatches } from './message';
+
+import ByteBuffer = flatbuffers.ByteBuffer;
+import Schema = Schema_.org.apache.arrow.flatbuf.Schema;
+import MessageHeader = Message_.org.apache.arrow.flatbuf.MessageHeader;
+
+export function* readStream(...bbs: ByteBuffer[]) {
+    if (!bbs.length || _checkMagic(bbs[0].bytes(), 0)) {
+        throw new Error('Invalid Arrow Stream');
+    }
+    for (const message of readMessages(bbs[0])) {
+        if (message.headerType() === MessageHeader.Schema) {
+            const schema = message.header(new Schema());
+            for (const bb of bbs) {
+                for (const batch of readMessageBatches(bb)) {
+                    yield { schema, batch };
+                }
+            }
+            break;
+        }
+    }
+}
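
readRecords in arrow.ts distinguishes the two encapsulations by simply trying readFile first and falling back to readStream. An explicit check, if one were wanted, could reuse _checkMagic, since only the file format begins with the ARROW1 magic (a hedged sketch; the helper name is hypothetical):

    import { flatbuffers } from 'flatbuffers';
    import { _checkMagic } from './file';

    function isArrowFile(bb: flatbuffers.ByteBuffer): boolean {
        return _checkMagic(bb.bytes(), 0);
    }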


[6/7] arrow git commit: ARROW-1479: [JS] Expand JavaScript implementation

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/closure-compiler-scripts/Schema_generated.js
----------------------------------------------------------------------
diff --git a/js/closure-compiler-scripts/Schema_generated.js b/js/closure-compiler-scripts/Schema_generated.js
new file mode 100644
index 0000000..5b76443
--- /dev/null
+++ b/js/closure-compiler-scripts/Schema_generated.js
@@ -0,0 +1,2231 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+goog.module("module$targets$es5$cls$format$Schema_generated");
+goog.module.declareLegacyNamespace();
+
+/**
+ * @const
+ * @namespace
+ */
+var org = org || {};
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache = org.apache || {};
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache.arrow = org.apache.arrow || {};
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache.arrow.flatbuf = org.apache.arrow.flatbuf || {};
+
+/**
+ * @enum
+ */
+org.apache.arrow.flatbuf.MetadataVersion = {
+  V1: 0, 0: 'V1',
+  V2: 1, 1: 'V2',
+  V3: 2, 2: 'V3',
+};
+
+/**
+ * @enum
+ */
+org.apache.arrow.flatbuf.UnionMode = {
+  Sparse: 0, 0: 'Sparse',
+  Dense: 1, 1: 'Dense',
+};
+
+/**
+ * @enum
+ */
+org.apache.arrow.flatbuf.Precision = {
+  HALF: 0, 0: 'HALF',
+  SINGLE: 1, 1: 'SINGLE',
+  DOUBLE: 2, 2: 'DOUBLE',
+};
+
+/**
+ * @enum
+ */
+org.apache.arrow.flatbuf.DateUnit = {
+  DAY: 0, 0: 'DAY',
+  MILLISECOND: 1, 1: 'MILLISECOND',
+};
+
+/**
+ * @enum
+ */
+org.apache.arrow.flatbuf.TimeUnit = {
+  SECOND: 0, 0: 'SECOND',
+  MILLISECOND: 1, 1: 'MILLISECOND',
+  MICROSECOND: 2, 2: 'MICROSECOND',
+  NANOSECOND: 3, 3: 'NANOSECOND',
+};
+
+/**
+ * @enum
+ */
+org.apache.arrow.flatbuf.IntervalUnit = {
+  YEAR_MONTH: 0, 0: 'YEAR_MONTH',
+  DAY_TIME: 1, 1: 'DAY_TIME',
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * Top-level Type value, enabling extensible type-specific metadata. We can
+ * add new logical types to Type without breaking backwards compatibility
+ *
+ * @enum
+ */
+org.apache.arrow.flatbuf.Type = {
+  NONE: 0, 0: 'NONE',
+  Null: 1, 1: 'Null',
+  Int: 2, 2: 'Int',
+  FloatingPoint: 3, 3: 'FloatingPoint',
+  Binary: 4, 4: 'Binary',
+  Utf8: 5, 5: 'Utf8',
+  Bool: 6, 6: 'Bool',
+  Decimal: 7, 7: 'Decimal',
+  Date: 8, 8: 'Date',
+  Time: 9, 9: 'Time',
+  Timestamp: 10, 10: 'Timestamp',
+  Interval: 11, 11: 'Interval',
+  List: 12, 12: 'List',
+  Struct_: 13, 13: 'Struct_',
+  Union: 14, 14: 'Union',
+  FixedSizeBinary: 15, 15: 'FixedSizeBinary',
+  FixedSizeList: 16, 16: 'FixedSizeList',
+  Map: 17, 17: 'Map',
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * The possible types of a vector
+ *
+ * @enum
+ */
+org.apache.arrow.flatbuf.VectorType = {
+  /**
+   * used in List type, Dense Union and variable length primitive types (String, Binary)
+   */
+  OFFSET: 0, 0: 'OFFSET',
+
+  /**
+   * actual data, either fixed width primitive types in slots or variable width delimited by an OFFSET vector
+   */
+  DATA: 1, 1: 'DATA',
+
+  /**
+   * Bit vector indicating if each value is null
+   */
+  VALIDITY: 2, 2: 'VALIDITY',
+
+  /**
+   * Type vector used in Union type
+   */
+  TYPE: 3, 3: 'TYPE',
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * Endianness of the platform producing the data
+ *
+ * @enum
+ */
+org.apache.arrow.flatbuf.Endianness = {
+  Little: 0, 0: 'Little',
+  Big: 1, 1: 'Big',
+};
+
+/**
+ * These are stored in the flatbuffer in the Type union below
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Null = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Null}
+ */
+org.apache.arrow.flatbuf.Null.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Null=} obj
+ * @returns {org.apache.arrow.flatbuf.Null}
+ */
+org.apache.arrow.flatbuf.Null.getRootAsNull = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Null).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Null.startNull = function(builder) {
+  builder.startObject(0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Null.endNull = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * A Struct_ in the flatbuffer metadata is the same as an Arrow Struct
+ * (according to the physical memory layout). We used Struct_ here as
+ * Struct is a reserved word in Flatbuffers
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Struct_ = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Struct_}
+ */
+org.apache.arrow.flatbuf.Struct_.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Struct_=} obj
+ * @returns {org.apache.arrow.flatbuf.Struct_}
+ */
+org.apache.arrow.flatbuf.Struct_.getRootAsStruct_ = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Struct_).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Struct_.startStruct_ = function(builder) {
+  builder.startObject(0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Struct_.endStruct_ = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.List = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.List}
+ */
+org.apache.arrow.flatbuf.List.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.List=} obj
+ * @returns {org.apache.arrow.flatbuf.List}
+ */
+org.apache.arrow.flatbuf.List.getRootAsList = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.List).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.List.startList = function(builder) {
+  builder.startObject(0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.List.endList = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.FixedSizeList = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.FixedSizeList}
+ */
+org.apache.arrow.flatbuf.FixedSizeList.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.FixedSizeList=} obj
+ * @returns {org.apache.arrow.flatbuf.FixedSizeList}
+ */
+org.apache.arrow.flatbuf.FixedSizeList.getRootAsFixedSizeList = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.FixedSizeList).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * Number of list items per value
+ *
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.FixedSizeList.prototype.listSize = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.FixedSizeList.startFixedSizeList = function(builder) {
+  builder.startObject(1);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} listSize
+ */
+org.apache.arrow.flatbuf.FixedSizeList.addListSize = function(builder, listSize) {
+  builder.addFieldInt32(0, listSize, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.FixedSizeList.endFixedSizeList = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * A Map is a logical nested type that is represented as
+ *
+ * List<entry: Struct<key: K, value: V>>
+ *
+ * In this layout, the keys and values are each respectively contiguous. We do
+ * not constrain the key and value types, so the application is responsible
+ * for ensuring that the keys are hashable and unique. Whether the keys are sorted
+ * may be set in the metadata for this field
+ *
+ * In a Field with Map type, the Field has a child Struct field, which then
+ * has two children: the first the key type and the second the value type. The names of the
+ * child fields may be respectively "entry", "key", and "value", but this is
+ * not enforced
+ *
+ * Map
+ *   - child[0] entry: Struct
+ *     - child[0] key: K
+ *     - child[1] value: V
+ *
+ * Neither the "entry" field nor the "key" field may be nullable.
+ *
+ * The metadata is structured so that Arrow systems without special handling
+ * for Map can make Map an alias for List. The "layout" attribute for the Map
+ * field must have the same contents as a List.
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Map = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Map}
+ */
+org.apache.arrow.flatbuf.Map.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Map=} obj
+ * @returns {org.apache.arrow.flatbuf.Map}
+ */
+org.apache.arrow.flatbuf.Map.getRootAsMap = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Map).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * Set to true if the keys within each value are sorted
+ *
+ * @returns {boolean}
+ */
+org.apache.arrow.flatbuf.Map.prototype.keysSorted = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Map.startMap = function(builder) {
+  builder.startObject(1);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {boolean} keysSorted
+ */
+org.apache.arrow.flatbuf.Map.addKeysSorted = function(builder, keysSorted) {
+  builder.addFieldInt8(0, +keysSorted, +false);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Map.endMap = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * A union is a complex type with children in Field.
+ * By default, ids in the type vector refer to the offsets in the children;
+ * optionally, typeIds provides an indirection between the child offset and the type id.
+ * For each child, typeIds[offset] is the id used in the type vector.
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Union = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Union}
+ */
+org.apache.arrow.flatbuf.Union.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Union=} obj
+ * @returns {org.apache.arrow.flatbuf.Union}
+ */
+org.apache.arrow.flatbuf.Union.getRootAsUnion = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Union).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.UnionMode}
+ */
+org.apache.arrow.flatbuf.Union.prototype.mode = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.UnionMode} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.UnionMode.Sparse;
+};
+
+/**
+ * @param {number} index
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Union.prototype.typeIds = function(index) {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? this.bb.readInt32(this.bb.__vector(this.bb_pos + offset) + index * 4) : 0;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Union.prototype.typeIdsLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @returns {Int32Array}
+ */
+org.apache.arrow.flatbuf.Union.prototype.typeIdsArray = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? new Int32Array(this.bb.bytes().buffer, this.bb.bytes().byteOffset + this.bb.__vector(this.bb_pos + offset), this.bb.__vector_len(this.bb_pos + offset)) : null;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Union.startUnion = function(builder) {
+  builder.startObject(2);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.UnionMode} mode
+ */
+org.apache.arrow.flatbuf.Union.addMode = function(builder, mode) {
+  builder.addFieldInt16(0, mode, org.apache.arrow.flatbuf.UnionMode.Sparse);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} typeIdsOffset
+ */
+org.apache.arrow.flatbuf.Union.addTypeIds = function(builder, typeIdsOffset) {
+  builder.addFieldOffset(1, typeIdsOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {Array.<number>} data
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Union.createTypeIdsVector = function(builder, data) {
+  builder.startVector(4, data.length, 4);
+  for (var i = data.length - 1; i >= 0; i--) {
+    builder.addInt32(data[i]);
+  }
+  return builder.endVector();
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.Union.startTypeIdsVector = function(builder, numElems) {
+  builder.startVector(4, numElems, 4);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Union.endUnion = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Int = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Int}
+ */
+org.apache.arrow.flatbuf.Int.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Int=} obj
+ * @returns {org.apache.arrow.flatbuf.Int}
+ */
+org.apache.arrow.flatbuf.Int.getRootAsInt = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Int).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Int.prototype.bitWidth = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @returns {boolean}
+ */
+org.apache.arrow.flatbuf.Int.prototype.isSigned = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Int.startInt = function(builder) {
+  builder.startObject(2);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} bitWidth
+ */
+org.apache.arrow.flatbuf.Int.addBitWidth = function(builder, bitWidth) {
+  builder.addFieldInt32(0, bitWidth, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {boolean} isSigned
+ */
+org.apache.arrow.flatbuf.Int.addIsSigned = function(builder, isSigned) {
+  builder.addFieldInt8(1, +isSigned, +false);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Int.endInt = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.FloatingPoint = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.FloatingPoint}
+ */
+org.apache.arrow.flatbuf.FloatingPoint.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.FloatingPoint=} obj
+ * @returns {org.apache.arrow.flatbuf.FloatingPoint}
+ */
+org.apache.arrow.flatbuf.FloatingPoint.getRootAsFloatingPoint = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.FloatingPoint).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.Precision}
+ */
+org.apache.arrow.flatbuf.FloatingPoint.prototype.precision = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.Precision} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Precision.HALF;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.FloatingPoint.startFloatingPoint = function(builder) {
+  builder.startObject(1);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.Precision} precision
+ */
+org.apache.arrow.flatbuf.FloatingPoint.addPrecision = function(builder, precision) {
+  builder.addFieldInt16(0, precision, org.apache.arrow.flatbuf.Precision.HALF);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.FloatingPoint.endFloatingPoint = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * Unicode with UTF-8 encoding
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Utf8 = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Utf8}
+ */
+org.apache.arrow.flatbuf.Utf8.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Utf8=} obj
+ * @returns {org.apache.arrow.flatbuf.Utf8}
+ */
+org.apache.arrow.flatbuf.Utf8.getRootAsUtf8 = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Utf8).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Utf8.startUtf8 = function(builder) {
+  builder.startObject(0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Utf8.endUtf8 = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Binary = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Binary}
+ */
+org.apache.arrow.flatbuf.Binary.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Binary=} obj
+ * @returns {org.apache.arrow.flatbuf.Binary}
+ */
+org.apache.arrow.flatbuf.Binary.getRootAsBinary = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Binary).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Binary.startBinary = function(builder) {
+  builder.startObject(0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Binary.endBinary = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.FixedSizeBinary = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.FixedSizeBinary}
+ */
+org.apache.arrow.flatbuf.FixedSizeBinary.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.FixedSizeBinary=} obj
+ * @returns {org.apache.arrow.flatbuf.FixedSizeBinary}
+ */
+org.apache.arrow.flatbuf.FixedSizeBinary.getRootAsFixedSizeBinary = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.FixedSizeBinary).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * Number of bytes per value
+ *
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.FixedSizeBinary.prototype.byteWidth = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.FixedSizeBinary.startFixedSizeBinary = function(builder) {
+  builder.startObject(1);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} byteWidth
+ */
+org.apache.arrow.flatbuf.FixedSizeBinary.addByteWidth = function(builder, byteWidth) {
+  builder.addFieldInt32(0, byteWidth, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.FixedSizeBinary.endFixedSizeBinary = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Bool = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Bool}
+ */
+org.apache.arrow.flatbuf.Bool.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Bool=} obj
+ * @returns {org.apache.arrow.flatbuf.Bool}
+ */
+org.apache.arrow.flatbuf.Bool.getRootAsBool = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Bool).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Bool.startBool = function(builder) {
+  builder.startObject(0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Bool.endBool = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Decimal = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Decimal}
+ */
+org.apache.arrow.flatbuf.Decimal.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Decimal=} obj
+ * @returns {org.apache.arrow.flatbuf.Decimal}
+ */
+org.apache.arrow.flatbuf.Decimal.getRootAsDecimal = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Decimal).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * Total number of decimal digits
+ *
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Decimal.prototype.precision = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+};
+
+/**
+ * Number of digits after the decimal point "."
+ *
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Decimal.prototype.scale = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? this.bb.readInt32(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Decimal.startDecimal = function(builder) {
+  builder.startObject(2);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} precision
+ */
+org.apache.arrow.flatbuf.Decimal.addPrecision = function(builder, precision) {
+  builder.addFieldInt32(0, precision, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} scale
+ */
+org.apache.arrow.flatbuf.Decimal.addScale = function(builder, scale) {
+  builder.addFieldInt32(1, scale, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Decimal.endDecimal = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * Date is either a 32-bit or 64-bit type representing elapsed time since UNIX
+ * epoch (1970-01-01), stored in either of two units:
+ *
+ * * Milliseconds (64 bits) indicating UNIX time elapsed since the epoch (no
+ *   leap seconds), where the values are evenly divisible by 86400000
+ * * Days (32 bits) since the UNIX epoch
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Date = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Date}
+ */
+org.apache.arrow.flatbuf.Date.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Date=} obj
+ * @returns {org.apache.arrow.flatbuf.Date}
+ */
+org.apache.arrow.flatbuf.Date.getRootAsDate = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Date).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.DateUnit}
+ */
+org.apache.arrow.flatbuf.Date.prototype.unit = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.DateUnit} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.DateUnit.MILLISECOND;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Date.startDate = function(builder) {
+  builder.startObject(1);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.DateUnit} unit
+ */
+org.apache.arrow.flatbuf.Date.addUnit = function(builder, unit) {
+  builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.DateUnit.MILLISECOND);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Date.endDate = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * Time type. The physical storage type depends on the unit
+ * - SECOND and MILLISECOND: 32 bits
+ * - MICROSECOND and NANOSECOND: 64 bits
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Time = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Time}
+ */
+org.apache.arrow.flatbuf.Time.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Time=} obj
+ * @returns {org.apache.arrow.flatbuf.Time}
+ */
+org.apache.arrow.flatbuf.Time.getRootAsTime = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Time).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.TimeUnit}
+ */
+org.apache.arrow.flatbuf.Time.prototype.unit = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.TimeUnit} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.TimeUnit.MILLISECOND;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Time.prototype.bitWidth = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? this.bb.readInt32(this.bb_pos + offset) : 32;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Time.startTime = function(builder) {
+  builder.startObject(2);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.TimeUnit} unit
+ */
+org.apache.arrow.flatbuf.Time.addUnit = function(builder, unit) {
+  builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.TimeUnit.MILLISECOND);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} bitWidth
+ */
+org.apache.arrow.flatbuf.Time.addBitWidth = function(builder, bitWidth) {
+  builder.addFieldInt32(1, bitWidth, 32);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Time.endTime = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * Time elapsed from the Unix epoch, 00:00:00.000 on 1 January 1970, excluding
+ * leap seconds, as a 64-bit integer. Note that UNIX time does not include
+ * leap seconds.
+ *
+ * The Timestamp metadata supports both "time zone naive" and "time zone
+ * aware" timestamps. Read about the timezone attribute for more detail
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Timestamp = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Timestamp}
+ */
+org.apache.arrow.flatbuf.Timestamp.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Timestamp=} obj
+ * @returns {org.apache.arrow.flatbuf.Timestamp}
+ */
+org.apache.arrow.flatbuf.Timestamp.getRootAsTimestamp = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Timestamp).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.TimeUnit}
+ */
+org.apache.arrow.flatbuf.Timestamp.prototype.unit = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.TimeUnit} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.TimeUnit.SECOND;
+};
+
+/**
+ * The time zone is a string indicating the name of a time zone, one of:
+ *
+ * * As used in the Olson time zone database (the "tz database" or
+ *   "tzdata"), such as "America/New_York"
+ * * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30
+ *
+ * Whether a timezone string is present indicates different semantics about
+ * the data:
+ *
+ * * If the time zone is null or equal to an empty string, the data is "time
+ *   zone naive" and shall be displayed *as is* to the user, not localized
+ *   to the locale of the user. This data can be thought of as UTC but
+ *   without having "UTC" as the time zone, it is not considered to be
+ *   localized to any time zone
+ *
+ * * If the time zone is set to a valid value, values can be displayed as
+ *   "localized" to that time zone, even though the underlying 64-bit
+ *   integers are identical to the same data stored in UTC. Converting
+ *   between time zones is a metadata-only operation and does not change the
+ *   underlying values
+ *
+ * @param {flatbuffers.Encoding=} optionalEncoding
+ * @returns {string|Uint8Array|null}
+ */
+org.apache.arrow.flatbuf.Timestamp.prototype.timezone = function(optionalEncoding) {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Timestamp.startTimestamp = function(builder) {
+  builder.startObject(2);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.TimeUnit} unit
+ */
+org.apache.arrow.flatbuf.Timestamp.addUnit = function(builder, unit) {
+  builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.TimeUnit.SECOND);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} timezoneOffset
+ */
+org.apache.arrow.flatbuf.Timestamp.addTimezone = function(builder, timezoneOffset) {
+  builder.addFieldOffset(1, timezoneOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Timestamp.endTimestamp = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Interval = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Interval}
+ */
+org.apache.arrow.flatbuf.Interval.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Interval=} obj
+ * @returns {org.apache.arrow.flatbuf.Interval}
+ */
+org.apache.arrow.flatbuf.Interval.getRootAsInterval = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Interval).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.IntervalUnit}
+ */
+org.apache.arrow.flatbuf.Interval.prototype.unit = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.IntervalUnit} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.IntervalUnit.YEAR_MONTH;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Interval.startInterval = function(builder) {
+  builder.startObject(1);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.IntervalUnit} unit
+ */
+org.apache.arrow.flatbuf.Interval.addUnit = function(builder, unit) {
+  builder.addFieldInt16(0, unit, org.apache.arrow.flatbuf.IntervalUnit.YEAR_MONTH);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Interval.endInterval = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * represents the physical layout of a buffer
+ * buffers have fixed width slots of a given type
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.VectorLayout = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.VectorLayout}
+ */
+org.apache.arrow.flatbuf.VectorLayout.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.VectorLayout=} obj
+ * @returns {org.apache.arrow.flatbuf.VectorLayout}
+ */
+org.apache.arrow.flatbuf.VectorLayout.getRootAsVectorLayout = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.VectorLayout).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * the width of a slot in the buffer (typically 1, 8, 16, 32 or 64)
+ *
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.VectorLayout.prototype.bitWidth = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.readInt16(this.bb_pos + offset) : 0;
+};
+
+/**
+ * the purpose of the vector
+ *
+ * @returns {org.apache.arrow.flatbuf.VectorType}
+ */
+org.apache.arrow.flatbuf.VectorLayout.prototype.type = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? /** @type {org.apache.arrow.flatbuf.VectorType} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.VectorType.OFFSET;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.VectorLayout.startVectorLayout = function(builder) {
+  builder.startObject(2);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} bitWidth
+ */
+org.apache.arrow.flatbuf.VectorLayout.addBitWidth = function(builder, bitWidth) {
+  builder.addFieldInt16(0, bitWidth, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.VectorType} type
+ */
+org.apache.arrow.flatbuf.VectorLayout.addType = function(builder, type) {
+  builder.addFieldInt16(1, type, org.apache.arrow.flatbuf.VectorType.OFFSET);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.VectorLayout.endVectorLayout = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * user defined key value pairs to add custom metadata to arrow
+ * key namespacing is the responsibility of the user
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.KeyValue = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.KeyValue}
+ */
+org.apache.arrow.flatbuf.KeyValue.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.KeyValue=} obj
+ * @returns {org.apache.arrow.flatbuf.KeyValue}
+ */
+org.apache.arrow.flatbuf.KeyValue.getRootAsKeyValue = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.KeyValue).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @param {flatbuffers.Encoding=} optionalEncoding
+ * @returns {string|Uint8Array|null}
+ */
+org.apache.arrow.flatbuf.KeyValue.prototype.key = function(optionalEncoding) {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
+};
+
+/**
+ * @param {flatbuffers.Encoding=} optionalEncoding
+ * @returns {string|Uint8Array|null}
+ */
+org.apache.arrow.flatbuf.KeyValue.prototype.value = function(optionalEncoding) {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.KeyValue.startKeyValue = function(builder) {
+  builder.startObject(2);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} keyOffset
+ */
+org.apache.arrow.flatbuf.KeyValue.addKey = function(builder, keyOffset) {
+  builder.addFieldOffset(0, keyOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} valueOffset
+ */
+org.apache.arrow.flatbuf.KeyValue.addValue = function(builder, valueOffset) {
+  builder.addFieldOffset(1, valueOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.KeyValue.endKeyValue = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * Dictionary encoding metadata
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.DictionaryEncoding}
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.DictionaryEncoding=} obj
+ * @returns {org.apache.arrow.flatbuf.DictionaryEncoding}
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.getRootAsDictionaryEncoding = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.DictionaryEncoding).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * The known dictionary id in the application where this data is used. In
+ * the file or streaming formats, the dictionary ids are found in the
+ * DictionaryBatch messages
+ *
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.prototype.id = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+};
+
+/**
+ * The dictionary indices are constrained to be positive integers. If this
+ * field is null, the indices must be signed int32
+ *
+ * @param {org.apache.arrow.flatbuf.Int=} obj
+ * @returns {org.apache.arrow.flatbuf.Int|null}
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.prototype.indexType = function(obj) {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? (obj || new org.apache.arrow.flatbuf.Int).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+};
+
+/**
+ * By default, dictionaries are not ordered, or the order does not have
+ * semantic meaning. In some statistical applications, dictionary encoding
+ * is used to represent ordered categorical data, and we provide a way to
+ * preserve that metadata here
+ *
+ * @returns {boolean}
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.prototype.isOrdered = function() {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.startDictionaryEncoding = function(builder) {
+  builder.startObject(3);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Long} id
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.addId = function(builder, id) {
+  builder.addFieldInt64(0, id, builder.createLong(0, 0));
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} indexTypeOffset
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.addIndexType = function(builder, indexTypeOffset) {
+  builder.addFieldOffset(1, indexTypeOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {boolean} isOrdered
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.addIsOrdered = function(builder, isOrdered) {
+  builder.addFieldInt8(2, +isOrdered, +false);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.DictionaryEncoding.endDictionaryEncoding = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * A field represents a named column in a record / row batch or child of a
+ * nested type.
+ *
+ * - children is only for nested Arrow arrays
+ * - For primitive types, children will have length 0
+ * - nullable should default to true in general
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Field = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Field}
+ */
+org.apache.arrow.flatbuf.Field.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Field=} obj
+ * @returns {org.apache.arrow.flatbuf.Field}
+ */
+org.apache.arrow.flatbuf.Field.getRootAsField = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Field).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @param {flatbuffers.Encoding=} optionalEncoding
+ * @returns {string|Uint8Array|null}
+ */
+org.apache.arrow.flatbuf.Field.prototype.name = function(optionalEncoding) {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
+};
+
+/**
+ * @returns {boolean}
+ */
+org.apache.arrow.flatbuf.Field.prototype.nullable = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.Type}
+ */
+org.apache.arrow.flatbuf.Field.prototype.typeType = function() {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? /** @type {org.apache.arrow.flatbuf.Type} */ (this.bb.readUint8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Type.NONE;
+};
+
+/**
+ * @param {flatbuffers.Table} obj
+ * @returns {?flatbuffers.Table}
+ */
+org.apache.arrow.flatbuf.Field.prototype.type = function(obj) {
+  var offset = this.bb.__offset(this.bb_pos, 10);
+  return offset ? this.bb.__union(obj, this.bb_pos + offset) : null;
+};
+
+/**
+ * @param {org.apache.arrow.flatbuf.DictionaryEncoding=} obj
+ * @returns {org.apache.arrow.flatbuf.DictionaryEncoding|null}
+ */
+org.apache.arrow.flatbuf.Field.prototype.dictionary = function(obj) {
+  var offset = this.bb.__offset(this.bb_pos, 12);
+  return offset ? (obj || new org.apache.arrow.flatbuf.DictionaryEncoding).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+};
+
+/**
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.Field=} obj
+ * @returns {org.apache.arrow.flatbuf.Field}
+ */
+org.apache.arrow.flatbuf.Field.prototype.children = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 14);
+  return offset ? (obj || new org.apache.arrow.flatbuf.Field).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Field.prototype.childrenLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 14);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * layout of buffers produced for this type (as derived from the Type)
+ * does not include children
+ * each recordbatch will return instances of those Buffers.
+ *
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.VectorLayout=} obj
+ * @returns {org.apache.arrow.flatbuf.VectorLayout}
+ */
+org.apache.arrow.flatbuf.Field.prototype.layout = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 16);
+  return offset ? (obj || new org.apache.arrow.flatbuf.VectorLayout).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Field.prototype.layoutLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 16);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.KeyValue=} obj
+ * @returns {org.apache.arrow.flatbuf.KeyValue}
+ */
+org.apache.arrow.flatbuf.Field.prototype.customMetadata = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 18);
+  return offset ? (obj || new org.apache.arrow.flatbuf.KeyValue).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Field.prototype.customMetadataLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 18);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Field.startField = function(builder) {
+  builder.startObject(8);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} nameOffset
+ */
+org.apache.arrow.flatbuf.Field.addName = function(builder, nameOffset) {
+  builder.addFieldOffset(0, nameOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {boolean} nullable
+ */
+org.apache.arrow.flatbuf.Field.addNullable = function(builder, nullable) {
+  builder.addFieldInt8(1, +nullable, +false);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.Type} typeType
+ */
+org.apache.arrow.flatbuf.Field.addTypeType = function(builder, typeType) {
+  builder.addFieldInt8(2, typeType, org.apache.arrow.flatbuf.Type.NONE);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} typeOffset
+ */
+org.apache.arrow.flatbuf.Field.addType = function(builder, typeOffset) {
+  builder.addFieldOffset(3, typeOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} dictionaryOffset
+ */
+org.apache.arrow.flatbuf.Field.addDictionary = function(builder, dictionaryOffset) {
+  builder.addFieldOffset(4, dictionaryOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} childrenOffset
+ */
+org.apache.arrow.flatbuf.Field.addChildren = function(builder, childrenOffset) {
+  builder.addFieldOffset(5, childrenOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {Array.<flatbuffers.Offset>} data
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Field.createChildrenVector = function(builder, data) {
+  builder.startVector(4, data.length, 4);
+  for (var i = data.length - 1; i >= 0; i--) {
+    builder.addOffset(data[i]);
+  }
+  return builder.endVector();
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.Field.startChildrenVector = function(builder, numElems) {
+  builder.startVector(4, numElems, 4);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} layoutOffset
+ */
+org.apache.arrow.flatbuf.Field.addLayout = function(builder, layoutOffset) {
+  builder.addFieldOffset(6, layoutOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {Array.<flatbuffers.Offset>} data
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Field.createLayoutVector = function(builder, data) {
+  builder.startVector(4, data.length, 4);
+  for (var i = data.length - 1; i >= 0; i--) {
+    builder.addOffset(data[i]);
+  }
+  return builder.endVector();
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.Field.startLayoutVector = function(builder, numElems) {
+  builder.startVector(4, numElems, 4);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} customMetadataOffset
+ */
+org.apache.arrow.flatbuf.Field.addCustomMetadata = function(builder, customMetadataOffset) {
+  builder.addFieldOffset(7, customMetadataOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {Array.<flatbuffers.Offset>} data
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Field.createCustomMetadataVector = function(builder, data) {
+  builder.startVector(4, data.length, 4);
+  for (var i = data.length - 1; i >= 0; i--) {
+    builder.addOffset(data[i]);
+  }
+  return builder.endVector();
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.Field.startCustomMetadataVector = function(builder, numElems) {
+  builder.startVector(4, numElems, 4);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Field.endField = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
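For orientation, a hedged sketch of reading field-level metadata with the accessors above (assuming `field` is an org.apache.arrow.flatbuf.Field instance obtained from a parsed Schema, as sketched after the Schema table below):

```js
// field is assumed to be an org.apache.arrow.flatbuf.Field read from a parsed Schema
console.log(field.name());           // column name, e.g. 'origin_lat'
console.log(field.nullable());       // boolean
console.log(field.typeType());       // org.apache.arrow.flatbuf.Type enum value
console.log(field.childrenLength()); // 0 for primitive types

var enc = field.dictionary();        // DictionaryEncoding, or null if not dictionary-encoded
if (enc !== null) {
    console.log(enc.id().toFloat64(), enc.isOrdered());
}
```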
+/**
+ * ----------------------------------------------------------------------
+ * A Buffer represents a single contiguous memory segment
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Buffer = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Buffer}
+ */
+org.apache.arrow.flatbuf.Buffer.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * The shared memory page id where this buffer is located. Currently this is
+ * not used
+ *
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Buffer.prototype.page = function() {
+  return this.bb.readInt32(this.bb_pos);
+};
+
+/**
+ * The relative offset into the shared memory page where the bytes for this
+ * buffer starts
+ *
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.Buffer.prototype.offset = function() {
+  return this.bb.readInt64(this.bb_pos + 8);
+};
+
+/**
+ * The absolute length (in bytes) of the memory buffer. The memory is found
+ * from offset (inclusive) to offset + length (non-inclusive).
+ *
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.Buffer.prototype.length = function() {
+  return this.bb.readInt64(this.bb_pos + 16);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} page
+ * @param {flatbuffers.Long} offset
+ * @param {flatbuffers.Long} length
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Buffer.createBuffer = function(builder, page, offset, length) {
+  builder.prep(8, 24);
+  builder.writeInt64(length);
+  builder.writeInt64(offset);
+  builder.pad(4);
+  builder.writeInt32(page);
+  return builder.offset();
+};
+
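Buffer is a fixed-size struct, so it is written inline with `createBuffer` rather than with start/add/end calls. A minimal sketch, assuming a `flatbuffers.Builder` named `builder` is in scope (the page, offset, and length values here are only illustrative):

```js
// structs are serialized in place; createBuffer returns the struct's offset
var bufOffset = org.apache.arrow.flatbuf.Buffer.createBuffer(
    builder,
    0,                             // page (currently unused)
    builder.createLong(0, 0),      // offset into the shared memory page
    builder.createLong(1024, 0));  // length in bytes
```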
+/**
+ * ----------------------------------------------------------------------
+ * A Schema describes the columns in a row batch
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Schema = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Schema}
+ */
+org.apache.arrow.flatbuf.Schema.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Schema=} obj
+ * @returns {org.apache.arrow.flatbuf.Schema}
+ */
+org.apache.arrow.flatbuf.Schema.getRootAsSchema = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Schema).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * endianness of the buffer
+ * it is Little Endian by default
+ * if endianness doesn't match the underlying system then the vectors need to be converted
+ *
+ * @returns {org.apache.arrow.flatbuf.Endianness}
+ */
+org.apache.arrow.flatbuf.Schema.prototype.endianness = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.Endianness} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.Endianness.Little;
+};
+
+/**
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.Field=} obj
+ * @returns {org.apache.arrow.flatbuf.Field}
+ */
+org.apache.arrow.flatbuf.Schema.prototype.fields = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? (obj || new org.apache.arrow.flatbuf.Field).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Schema.prototype.fieldsLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.KeyValue=} obj
+ * @returns {org.apache.arrow.flatbuf.KeyValue}
+ */
+org.apache.arrow.flatbuf.Schema.prototype.customMetadata = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? (obj || new org.apache.arrow.flatbuf.KeyValue).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos + offset) + index * 4), this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Schema.prototype.customMetadataLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Schema.startSchema = function(builder) {
+  builder.startObject(3);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.Endianness} endianness
+ */
+org.apache.arrow.flatbuf.Schema.addEndianness = function(builder, endianness) {
+  builder.addFieldInt16(0, endianness, org.apache.arrow.flatbuf.Endianness.Little);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} fieldsOffset
+ */
+org.apache.arrow.flatbuf.Schema.addFields = function(builder, fieldsOffset) {
+  builder.addFieldOffset(1, fieldsOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {Array.<flatbuffers.Offset>} data
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Schema.createFieldsVector = function(builder, data) {
+  builder.startVector(4, data.length, 4);
+  for (var i = data.length - 1; i >= 0; i--) {
+    builder.addOffset(data[i]);
+  }
+  return builder.endVector();
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.Schema.startFieldsVector = function(builder, numElems) {
+  builder.startVector(4, numElems, 4);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} customMetadataOffset
+ */
+org.apache.arrow.flatbuf.Schema.addCustomMetadata = function(builder, customMetadataOffset) {
+  builder.addFieldOffset(2, customMetadataOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {Array.<flatbuffers.Offset>} data
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Schema.createCustomMetadataVector = function(builder, data) {
+  builder.startVector(4, data.length, 4);
+  for (var i = data.length - 1; i >= 0; i--) {
+    builder.addOffset(data[i]);
+  }
+  return builder.endVector();
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.Schema.startCustomMetadataVector = function(builder, numElems) {
+  builder.startVector(4, numElems, 4);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Schema.endSchema = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} offset
+ */
+org.apache.arrow.flatbuf.Schema.finishSchemaBuffer = function(builder, offset) {
+  builder.finish(offset);
+};
+
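To tie the pieces together, a minimal sketch of reading a Schema back out of raw bytes with the accessors above (assuming `bytes` is a Uint8Array containing a finished Schema flatbuffer and `org` is the namespace defined in this file):

```js
var flatbuffers = require('flatbuffers').flatbuffers;
var flatbuf = org.apache.arrow.flatbuf;

var bb = new flatbuffers.ByteBuffer(bytes);
var schema = flatbuf.Schema.getRootAsSchema(bb);

console.log(schema.endianness() === flatbuf.Endianness.Little);
for (var i = 0; i < schema.fieldsLength(); i++) {
    var field = schema.fields(i);
    console.log(field.name(), field.nullable());
}
```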
+// Exports for Node.js and RequireJS
+exports.org = org;


[7/7] arrow git commit: ARROW-1479: [JS] Expand JavaScript implementation

Posted by we...@apache.org.
ARROW-1479: [JS] Expand JavaScript implementation

Finally PR'ing https://github.com/graphistry/arrow to the official Arrow project

Primary motivations:
- Multi-part ArrowReader to support async and streaming
- ArrowReader and related types as native JS iterators (and soon, async iterators)
- Unify vectors across RecordBatches into a single logical virtual instance
- Fast vector subarray/slice to dense typed Arrays (handy for GPU compute)

Example:

```js
import { readFileSync } from 'fs';
import { Table } from 'apache-arrow';

const table = Table.from(...[
    'latlong/schema.arrow',
    'latlong/records.arrow'
].map((file) => readFileSync(file)));

const column = table.getColumn('origin_lat');
const typed = column.slice();

assert(typed instanceof Float32Array);

for (let i = -1, n = column.length; ++i < n;) {
    assert(column.get(i) === typed[i]);
}

console.log(table.toString());

/*
        origin_lat,         origin_lon
35.393089294433594,  -97.6007308959961
35.393089294433594,  -97.6007308959961
35.393089294433594,  -97.6007308959961
29.533695220947266, -98.46977996826172
29.533695220947266, -98.46977996826172
*/
```
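
Building on the snippet above (where `table` is already defined), the unified columns also make it easy to materialize plain JS rows using nothing beyond `getColumn`, `length`, and `get`; the column names and values here are the ones from the latlong example:

```js
const lat = table.getColumn('origin_lat');
const lon = table.getColumn('origin_lon');

const rows = [];
for (let i = -1, n = lat.length; ++i < n;) {
    rows.push({ origin_lat: lat.get(i), origin_lon: lon.get(i) });
}

console.log(rows[0]); // { origin_lat: 35.393089294433594, origin_lon: -97.6007308959961 }
```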

Todos:
- ArrowWriter
- Map type?
- [`readBuffersAsync`/`readStreamAsync`](https://github.com/graphistry/arrow/issues/8)
- fix prepublish script for Arrow project CI/deploy strategy

Author: Paul Taylor <pa...@me.com>

Closes #1062 from trxcllnt/new-arrow-js and squashes the following commits:

4f71e769 [Paul Taylor] add more rat excludes and delete changelog
fc01b80a [Paul Taylor] Update license/docs per Wes's feedback
5f66c097 [Paul Taylor] add .npmrc to rat exlusions
927ce614 [Paul Taylor] Merge branch 'master' into new-arrow-js
f6da81eb [Paul Taylor] Fix rat exlusions, add header to prepublish script
aff1fdb9 [Paul Taylor] Tweak the JS CI scripts, fix case-sensitive file name in git
8b9208a1 [Paul Taylor] Merge branch 'master' into new-arrow-js
4a74d856 [Paul Taylor] Update CI scripts for new Arrow JS lib
28261892 [Paul Taylor] chore(js): Move graphistry/arrow into apache/arrow


Project: http://git-wip-us.apache.org/repos/asf/arrow/repo
Commit: http://git-wip-us.apache.org/repos/asf/arrow/commit/0c8853f9
Tree: http://git-wip-us.apache.org/repos/asf/arrow/tree/0c8853f9
Diff: http://git-wip-us.apache.org/repos/asf/arrow/diff/0c8853f9

Branch: refs/heads/master
Commit: 0c8853f90612b485c853cb54acf34a820591ca1d
Parents: 8911a34
Author: Paul Taylor <pa...@me.com>
Authored: Fri Sep 8 18:42:08 2017 -0400
Committer: Wes McKinney <we...@twosigma.com>
Committed: Fri Sep 8 19:01:17 2017 -0400

----------------------------------------------------------------------
 ci/travis_before_script_js.sh                   |   10 -
 ci/travis_script_js.sh                          |    6 +-
 dev/release/rat_exclude_files.txt               |    5 +
 js/.gitignore                                   |   96 +-
 js/.npmignore                                   |    7 -
 js/.npmrc                                       |    1 +
 js/DEVELOP.md                                   |  140 ++
 js/LICENSE                                      |   39 +
 js/README.md                                    |  208 +-
 js/bin/arrow2csv.js                             |   92 +-
 js/bin/arrow_schema.js                          |   29 -
 js/bower.json                                   |   17 -
 js/closure-compiler-scripts/File_generated.js   |  264 ++
 .../Message_generated.js                        |  486 ++++
 js/closure-compiler-scripts/Schema_generated.js | 2231 +++++++++++++++++
 js/closure-compiler-scripts/flatbuffers.js      | 1204 ++++++++++
 js/closure-compiler-scripts/text-encoding.js    |  648 +++++
 js/closure-compiler-scripts/tslib.js            |  151 ++
 js/examples/read_file.html                      |   39 +-
 js/flatbuffers.sh                               |   30 -
 js/gulpfile.js                                  |  285 +++
 js/lerna.json                                   |    9 +
 js/package.json                                 |  153 +-
 js/perf/arrows/file/dictionary.arrow            |  Bin 0 -> 2522 bytes
 js/perf/arrows/file/simple.arrow                |  Bin 0 -> 1642 bytes
 js/perf/arrows/file/struct.arrow                |  Bin 0 -> 2354 bytes
 js/perf/arrows/multi/count/records.arrow        |  Bin 0 -> 224 bytes
 js/perf/arrows/multi/count/schema.arrow         |  Bin 0 -> 184 bytes
 js/perf/arrows/multi/latlong/records.arrow      |  Bin 0 -> 352 bytes
 js/perf/arrows/multi/latlong/schema.arrow       |  Bin 0 -> 264 bytes
 js/perf/arrows/multi/origins/records.arrow      |  Bin 0 -> 224 bytes
 js/perf/arrows/multi/origins/schema.arrow       |  Bin 0 -> 1604 bytes
 js/perf/arrows/stream/dictionary.arrow          |  Bin 0 -> 1776 bytes
 js/perf/arrows/stream/simple.arrow              |  Bin 0 -> 1188 bytes
 js/perf/arrows/stream/struct.arrow              |  Bin 0 -> 1884 bytes
 js/perf/config.js                               |   38 +
 js/perf/index.js                                |  113 +
 js/prepublish.sh                                |   26 +
 js/spec/arrow.js                                |  179 --
 js/spec/dictionary-stream.arrow                 |  Bin 1776 -> 0 bytes
 js/spec/dictionary.arrow                        |  Bin 2522 -> 0 bytes
 js/spec/simple-stream.arrow                     |  Bin 1188 -> 0 bytes
 js/spec/simple.arrow                            |  Bin 1642 -> 0 bytes
 js/spec/struct_example-stream.arrow             |  Bin 1884 -> 0 bytes
 js/spec/struct_example.arrow                    |  Bin 2354 -> 0 bytes
 js/src/Arrow.externs.ts                         |   67 +
 js/src/Arrow.internal.ts                        |  105 +
 js/src/Arrow.ts                                 |   31 +
 js/src/Arrow_generated.d.ts                     |    5 -
 js/src/arrow.ts                                 |  515 ----
 js/src/bitarray.ts                              |   42 -
 js/src/format/File_generated.ts                 |  240 ++
 js/src/format/Message_generated.ts              |  469 ++++
 js/src/format/Schema_generated.ts               | 2254 ++++++++++++++++++
 js/src/reader/arrow.ts                          |   78 +
 js/src/reader/dictionary.ts                     |   43 +
 js/src/reader/file.ts                           |   79 +
 js/src/reader/message.ts                        |   63 +
 js/src/reader/stream.ts                         |   43 +
 js/src/reader/vector.ts                         |  271 +++
 js/src/table.ts                                 |  133 ++
 js/src/types.ts                                 |  597 -----
 js/src/vector/dictionary.ts                     |   51 +
 js/src/vector/list.ts                           |  108 +
 js/src/vector/struct.ts                         |   39 +
 js/src/vector/typed.ts                          |  326 +++
 js/src/vector/vector.ts                         |   91 +
 js/test/Arrow.ts                                |   67 +
 js/test/__snapshots__/reader-tests.ts.snap      |  497 ++++
 js/test/__snapshots__/table-tests.ts.snap       | 1815 ++++++++++++++
 js/test/arrows/file/dictionary.arrow            |  Bin 0 -> 2522 bytes
 js/test/arrows/file/dictionary2.arrow           |  Bin 0 -> 2762 bytes
 js/test/arrows/file/multi_dictionary.arrow      |  Bin 0 -> 3482 bytes
 js/test/arrows/file/simple.arrow                |  Bin 0 -> 1642 bytes
 js/test/arrows/file/struct.arrow                |  Bin 0 -> 2354 bytes
 js/test/arrows/multi/count/records.arrow        |  Bin 0 -> 224 bytes
 js/test/arrows/multi/count/schema.arrow         |  Bin 0 -> 184 bytes
 js/test/arrows/multi/latlong/records.arrow      |  Bin 0 -> 352 bytes
 js/test/arrows/multi/latlong/schema.arrow       |  Bin 0 -> 264 bytes
 js/test/arrows/multi/origins/records.arrow      |  Bin 0 -> 224 bytes
 js/test/arrows/multi/origins/schema.arrow       |  Bin 0 -> 1604 bytes
 js/test/arrows/stream/dictionary.arrow          |  Bin 0 -> 1776 bytes
 js/test/arrows/stream/simple.arrow              |  Bin 0 -> 1188 bytes
 js/test/arrows/stream/struct.arrow              |  Bin 0 -> 1884 bytes
 js/test/reader-tests.ts                         |   50 +
 js/test/table-tests.ts                          |   88 +
 js/test/test-config.ts                          |   42 +
 js/test/tsconfig.json                           |   11 +
 js/test/vector-tests.ts                         |  226 ++
 js/tsconfig.json                                |   19 +-
 js/tsconfig/tsconfig.base.json                  |   25 +
 js/tsconfig/tsconfig.es2015.cjs.json            |    8 +
 js/tsconfig/tsconfig.es2015.cls.json            |   10 +
 js/tsconfig/tsconfig.es2015.esm.json            |    8 +
 js/tsconfig/tsconfig.es5.cjs.json               |    8 +
 js/tsconfig/tsconfig.es5.cls.json               |   10 +
 js/tsconfig/tsconfig.es5.esm.json               |    8 +
 js/tsconfig/tsconfig.esnext.cjs.json            |    8 +
 js/tsconfig/tsconfig.esnext.cls.json            |   10 +
 js/tsconfig/tsconfig.esnext.esm.json            |    8 +
 js/tslint.json                                  |   49 +-
 js/webpack.config.js                            |   54 -
 102 files changed, 13567 insertions(+), 1610 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/ci/travis_before_script_js.sh
----------------------------------------------------------------------
diff --git a/ci/travis_before_script_js.sh b/ci/travis_before_script_js.sh
index b72accc..1d6bece 100755
--- a/ci/travis_before_script_js.sh
+++ b/ci/travis_before_script_js.sh
@@ -21,19 +21,9 @@
 set -ex
 
 source $TRAVIS_BUILD_DIR/ci/travis_env_common.sh
-source $TRAVIS_BUILD_DIR/ci/travis_install_conda.sh
-
-# Download flatbuffers
-export FLATBUFFERS_HOME=$TRAVIS_BUILD_DIR/flatbuffers
-conda create -y -q -p $FLATBUFFERS_HOME python=2.7 flatbuffers
-export PATH="$FLATBUFFERS_HOME/bin:$PATH"
-
-npm install -g typescript
-npm install -g webpack
 
 pushd $ARROW_JS_DIR
 
 npm install
-npm run build
 
 popd

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/ci/travis_script_js.sh
----------------------------------------------------------------------
diff --git a/ci/travis_script_js.sh b/ci/travis_script_js.sh
index cb1e9e1..9f77dec 100755
--- a/ci/travis_script_js.sh
+++ b/ci/travis_script_js.sh
@@ -23,6 +23,10 @@ JS_DIR=${TRAVIS_BUILD_DIR}/js
 
 pushd $JS_DIR
 
-npm test
+npm run validate
+
+# Uncomment to use coveralls
+# npm run test:coverage
+# cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js;
 
 popd

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/dev/release/rat_exclude_files.txt
----------------------------------------------------------------------
diff --git a/dev/release/rat_exclude_files.txt b/dev/release/rat_exclude_files.txt
index 15a3893..83c9f19 100644
--- a/dev/release/rat_exclude_files.txt
+++ b/dev/release/rat_exclude_files.txt
@@ -1,6 +1,10 @@
+*.npmrc
 *.gitignore
 *_generated.h
+*_generated.js
+*_generated.ts
 *.json
+*.snap
 cpp/src/arrow/io/mman.h
 cpp/src/arrow/util/random.h
 cpp/src/arrow/status.cc
@@ -29,6 +33,7 @@ cpp/src/plasma/thirdparty/xxhash.cc
 cpp/src/plasma/thirdparty/xxhash.h
 dev/release/rat_exclude_files.txt
 js/.npmignore
+js/closure-compiler-scripts/*
 python/cmake_modules/BuildUtils.cmake
 python/cmake_modules/FindPythonLibsNew.cmake
 python/cmake_modules/FindNumPy.cmake

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/.gitignore
----------------------------------------------------------------------
diff --git a/js/.gitignore b/js/.gitignore
index ea5514f..b48f35b 100644
--- a/js/.gitignore
+++ b/js/.gitignore
@@ -1,7 +1,89 @@
-src/Arrow_generated.js
-lib
-lib-esm
-_bundles
-node_modules
-.idea
-*.iml
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+.vscode
+yarn.lock
+package-lock.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (http://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Typescript v1 declaration files
+typings/
+
+# Optional npm cache directory
+.npm
+
+# JS package manager files
+yarn.lock
+package-lock.json
+
+# Optional eslint cache
+.eslintcache
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
+.env
+
+# compilation targets
+dist
+targets/es5
+targets/es2015
+targets/esnext

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/.npmignore
----------------------------------------------------------------------
diff --git a/js/.npmignore b/js/.npmignore
deleted file mode 100644
index 333aeec..0000000
--- a/js/.npmignore
+++ /dev/null
@@ -1,7 +0,0 @@
-.gitignore
-.npmignore
-src/
-spec/
-tsconfig.json
-webpack.config.js
-flatbuffers.sh

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/.npmrc
----------------------------------------------------------------------
diff --git a/js/.npmrc b/js/.npmrc
new file mode 100644
index 0000000..43c97e7
--- /dev/null
+++ b/js/.npmrc
@@ -0,0 +1 @@
+package-lock=false

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/DEVELOP.md
----------------------------------------------------------------------
diff --git a/js/DEVELOP.md b/js/DEVELOP.md
new file mode 100644
index 0000000..5b4ac14
--- /dev/null
+++ b/js/DEVELOP.md
@@ -0,0 +1,140 @@
+<!---
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+# The npm scripts
+
+* `npm run clean` - cleans targets
+* `npm run build` - cleans and compiles all targets
+* `npm test` - executes tests against built targets
+
+These npm scripts accept argument lists of targets × modules:
+
+* Available `targets` are `es5`, `es2015`, `esnext`, and `all` (default: `all`)
+* Available `modules` are `cjs`, `esm`, `umd`, and `all` (default: `all`)
+
+Examples:
+
+* `npm run build` -- builds all ES targets in all module formats
+* `npm run build -- -t es5 -m all` -- builds the ES5 target in all module formats
+* `npm run build -- -t all -m cjs` -- builds all ES targets in the CommonJS module format
+* `npm run build -- --targets es5 es2015 -m all` -- builds the ES5 and ES2015 targets in all module formats
+* `npm run build -- -t es5 --modules cjs esm` -- builds the ES5 target in CommonJS and ESModules module formats
+
+This argument configuration also applies to `clean` and `test` scripts.
+
+* `npm run deploy`
+
+Uses [lerna](https://github.com/lerna/lerna) to publish each build target to npm with [conventional](https://conventionalcommits.org/) [changelogs](https://github.com/conventional-changelog/conventional-changelog/tree/master/packages/conventional-changelog-cli).
+
+# Updating the Arrow format flatbuffers generated code
+
+Once generated, the flatbuffers format code needs to be adjusted for our TS and JS build environments.
+
+## TypeScript
+
+1. Generate the flatbuffers TypeScript source from the Arrow project root directory:
+    ```sh
+    flatc --ts -o ./js/src/format ./format/*.fbs
+    ```
+1. Change all the `flatbuffers` imports to
+    ```ts
+    import { flatbuffers } from "flatbuffers"
+    ```
+1. Delete `Tensor_generated.ts` (remove this step once we support Tensors)
+1. Remove Tensor import from `Schema_generated.ts`
+1. Add `/* tslint:disable:class-name */` to the top of `Schema_generated.ts`
+
+## JavaScript (for Google Closure Compiler builds)
+
+1. Generate the flatbuffers JS source from the Arrow project root directory
+    ```sh
+    flatc --js -o ./js/closure-compiler-scripts ./format/*.fbs
+    ```
+1. Delete `Tensor_generated.js` (remove this step once we support Tensors)
+1. Add `goog.module` declarations to the top of each generated file
+
+    Each file starts with a header that looks like this:
+    ```js
+    // automatically generated by the FlatBuffers compiler, do not modify
+
+    /**
+    * @const
+    * @namespace
+    */
+    var org = org || {};
+    ```
+
+    Update the header of each file to explicitly declare its module.
+
+    `Schema_generated.js`:
+    ```js
+    // automatically generated by the FlatBuffers compiler, do not modify
+    goog.module("module$targets$es5$cls$format$Schema_generated");
+    goog.module.declareLegacyNamespace();
+
+    /**
+    * @const
+    * @namespace
+    */
+    var org = org || {};
+    ```
+
+    `File_generated.js`:
+
+    ```js
+    // automatically generated by the FlatBuffers compiler, do not modify
+    goog.module("module$targets$es5$cls$format$File_generated");
+    goog.module.declareLegacyNamespace();
+    var Schema_ = goog.require("module$targets$es5$cls$format$Schema_generated");
+    /**
+    * @const
+    * @namespace
+    */
+    var org = Schema_.org;
+    ```
+
+    `Message_generated.js`:
+
+    ```js
+    // automatically generated by the FlatBuffers compiler, do not modify
+    goog.module("module$targets$es5$cls$format$Message_generated");
+    goog.module.declareLegacyNamespace();
+    var Schema_ = goog.require("module$targets$es5$cls$format$Schema_generated");
+    /**
+    * @const
+    * @namespace
+    */
+    var org = Schema_.org;
+    ```
+
+1. Replace the last line's export declaration
+
+    The last line of each file is:
+
+    ```js
+    // Exports for Node.js and RequireJS
+    this.org = org;
+    ```
+
+    This should instead read:
+
+    ```js
+    // Exports for Node.js and RequireJS
+    exports.org = org;
+    ```
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/LICENSE
----------------------------------------------------------------------
diff --git a/js/LICENSE b/js/LICENSE
new file mode 100644
index 0000000..02e7948
--- /dev/null
+++ b/js/LICENSE
@@ -0,0 +1,39 @@
+## 3rd-party licenses for code that has been adapted for the Arrow JavaScript
+   library
+
+--------------------------------------------------------------------------------
+
+This project includes code from the FlatBuffers project
+
+Copyright 2014 Google Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+--------------------------------------------------------------------------------
+
+This project includes code from the tslib project
+
+Copyright 2015 Microsoft Corporation. All rights reserved. 
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/README.md
----------------------------------------------------------------------
diff --git a/js/README.md b/js/README.md
index 38e8faf..bee3a9c 100644
--- a/js/README.md
+++ b/js/README.md
@@ -17,50 +17,194 @@
   under the License.
 -->
 
-### Installation
+# [Apache Arrow](https://github.com/apache/arrow) in JS
 
-From this directory, run:
+[![Build Status](https://travis-ci.org/apache/arrow.svg?branch=master)](https://travis-ci.org/apache/arrow)
+[![Coverage Status](https://coveralls.io/repos/github/apache/arrow/badge.svg)](https://coveralls.io/github/apache/arrow)
 
-``` bash
-$ npm install   # pull dependencies
-$ npm run lint -- <filename>  # run tslint
-$ npm run build # build typescript (run tsc and webpack)
-$ npm run test  # run the unit tests (node.js only)
+Arrow is a set of technologies that enable big-data systems to process and move data fast.
+
+## install [apache-arrow from npm](https://www.npmjs.com/package/apache-arrow)
+
+`npm install apache-arrow`
+
+(read about how we [package apache-arrow](#packaging) below)
+
+# Powering Columnar In-Memory Analytics
+
+Apache Arrow is a columnar memory layout specification for encoding vectors and table-like containers of flat and nested data. The Arrow spec aligns columnar data in memory to minimize cache misses and take advantage of the latest SIMD (Single Instruction, Multiple Data) and GPU operations on modern processors.
+
+Apache Arrow is the emerging standard for large in-memory columnar data ([Spark](https://spark.apache.org/), [Pandas](http://wesmckinney.com/blog/pandas-and-apache-arrow/), [Drill](https://drill.apache.org/), ...). By standardizing on a common binary interchange format, big data systems can reduce the costs and friction associated with cross-system communication.
+
+# Related Projects
+
+* [GoAI](http://gpuopenanalytics.com/) -- Arrow-powered GPU analytics
+* [rxjs-mapd](https://github.com/graphistry/rxjs-mapd) -- A MapD Core node-driver that returns query results as Arrow columns
+
+# Usage
+
+## Get a table from an Arrow file on disk
+
+```es6
+import { readFileSync } from 'fs';
+import { Table } from 'apache-arrow';
+
+const arrow = readFileSync('simple.arrow');
+const table = Table.from(arrow);
+
+console.log(table.toString());
+
+/*
+ foo,  bar,  baz
+   1,    1,   aa
+null, null, null
+   3, null, null
+   4,    4,  bbb
+   5,    5, cccc
+*/
 ```
 
-### Usage
-The library is designed to be used with node.js or in the browser, this repository contains examples of both.
+## Create a Table when the Arrow file is split across buffers
+
+```es6
+import { readFileSync } from 'fs';
+import { Table } from 'apache-arrow';
 
-#### Node
-Import the arrow module:
+const table = Table.from(...[
+    'latlong/schema.arrow',
+    'latlong/records.arrow'
+].map((file) => readFileSync(file)));
 
-``` js
-var arrow = require("arrow");
+console.log(table.toString());
+
+/*
+        origin_lat,         origin_lon
+35.393089294433594,  -97.6007308959961
+35.393089294433594,  -97.6007308959961
+35.393089294433594,  -97.6007308959961
+29.533695220947266, -98.46977996826172
+29.533695220947266, -98.46977996826172
+*/
 ```
 
-See [bin/arrow_schema.js](bin/arrow_schema.js) and [bin/arrow2csv.js](bin/arrow2csv.js) for usage examples.
+## Columns are what you'd expect
+
+```es6
+import { readFileSync } from 'fs';
+import { Table } from 'apache-arrow';
+
+const table = Table.from(...[
+    'latlong/schema.arrow',
+    'latlong/records.arrow'
+].map(readFileSync));
+
+const column = table.getColumn('origin_lat');
+const typed = column.slice();
+
+assert(typed instanceof Float32Array);
 
-#### Browser
-Include `_bundles/arrow.js` in a `<script />` tag:
-``` html
-<script src="_bundles/arrow.js"/>
+for (let i = -1, n = column.length; ++i < n;) {
+    assert(column.get(i) === typed[i]);
+}
 ```
-See [examples/read_file.html](examples/read_file.html) for a usage example.
 
-### API
-##### `arrow.getReader(buffer)`
-Returns an `ArrowReader` object representing the Arrow file or stream contained in
-the `buffer`.
+## Usage with MapD Core
+
+```es6
+import MapD from 'rxjs-mapd';
+import { Table } from 'apache-arrow';
+
+const port = 9091;
+const host = `localhost`;
+const db = `mapd`;
+const user = `mapd`;
+const password = `HyperInteractive`;
+
+MapD.open(host, port)
+  .connect(db, user, password)
+  .flatMap((session) =>
+    // queryDF returns Arrow buffers
+    session.queryDF(`
+      SELECT origin_city
+      FROM flights
+      WHERE dest_city ILIKE 'dallas'
+      LIMIT 5`
+    ).disconnect()
+  )
+  .map(([schema, records]) =>
+    // Create Arrow Table from results
+    Table.from(schema, records))
+  .map((table) =>
+    // Stringify the table to CSV
+    table.toString({ index: true }))
+  .subscribe((csvStr) =>
+    console.log(csvStr));
+/*
+Index,   origin_city
+    0, Oklahoma City
+    1, Oklahoma City
+    2, Oklahoma City
+    3,   San Antonio
+    4,   San Antonio
+*/
+```
+
+# Getting involved
+
+See [develop.md](https://github.com/apache/arrow/blob/master/develop.md)
+
+Even if you do not plan to contribute to Apache Arrow itself or Arrow
+integrations in other projects, we'd be happy to have you involved:
+
+* Join the mailing list: send an email to
+  [dev-subscribe@arrow.apache.org][1]. Share your ideas and use cases for the
+  project.
+* [Follow our activity on JIRA][3]
+* [Learn the format][2]
+* Contribute code to one of the reference implementations
+
+We prefer to receive contributions in the form of GitHub pull requests. Please send pull requests against the [github.com/apache/arrow][4] repository.
+
+If you are looking for some ideas on what to contribute, check out the [JIRA
+issues][3] for the Apache Arrow project. Comment on the issue and/or contact
+[dev@arrow.apache.org](http://mail-archives.apache.org/mod_mbox/arrow-dev/)
+with your questions and ideas.
+
+If you’d like to report a bug but don’t have time to fix it, you can still post
+it on JIRA, or email the mailing list
+[dev@arrow.apache.org](http://mail-archives.apache.org/mod_mbox/arrow-dev/)
+
+## Packaging
+
+`apache-arrow` is written in TypeScript, but the project is compiled to multiple JS versions and common module formats.
+
+The base `apache-arrow` package includes all the compilation targets for convenience, but if you're conscious of your `node_modules` footprint, we've got you covered.
+
+The targets are also published under the `@apache-arrow` namespace:
+
+```sh
+npm install @apache-arrow/es5-cjs # ES5 CommonJS target
+npm install @apache-arrow/es5-esm # ES5 ESModules target
+npm install @apache-arrow/es5-umd # ES5 UMD target
+npm install @apache-arrow/es2015-cjs # ES2015 CommonJS target
+npm install @apache-arrow/es2015-esm # ES2015 ESModules target
+npm install @apache-arrow/es2015-umd # ES2015 UMD target
+npm install @apache-arrow/esnext-cjs # ESNext CommonJS target
+npm install @apache-arrow/esnext-esm # ESNext ESModules target
+npm install @apache-arrow/esnext-umd # ESNext UMD target
+```
+
+### Why we package like this
+
+The JS community is a diverse group with a varied list of target environments and tool chains. Publishing multiple packages accommodates projects of all stripes.
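For example, a project that only ships ES5 CommonJS could depend on a single target, assuming the target packages expose the same exports as the base `apache-arrow` package:

```js
// hypothetical consumer code depending on one compilation target
var Table = require('@apache-arrow/es5-cjs').Table;
var table = Table.from(require('fs').readFileSync('simple.arrow'));
```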
 
-##### `ArrowReader.loadNextBatch()`
-Loads the next record batch and returns it's length.
+If you think we missed a compilation target and it's a blocker for adoption, please open an issue. We're here for you ❤️.
 
-##### `ArrowReader.getSchema()`
-Returns a JSON representation of the file's Arrow schema.
+# License
 
-##### `ArrowReader.getVectors()`
-Returns a list of `Vector` objects, one for each column.
-Vector objects have, at minimum, a `get(i)` method and a `length` attribute.
+[Apache 2.0](https://github.com/apache/arrow/blob/master/LICENSE)
 
-##### `ArrowReader.getVector(name: String)`
-Return a Vector object for column `name`
+[1]: mailto:dev-subscribe@arrow.apache.org
+[2]: https://github.com/apache/arrow/tree/master/format
+[3]: https://issues.apache.org/jira/browse/ARROW
+[4]: https://github.com/apache/arrow
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/bin/arrow2csv.js
----------------------------------------------------------------------
diff --git a/js/bin/arrow2csv.js b/js/bin/arrow2csv.js
index c1801f7..f316b84 100755
--- a/js/bin/arrow2csv.js
+++ b/js/bin/arrow2csv.js
@@ -17,37 +17,73 @@
 // specific language governing permissions and limitations
 // under the License.
 
-var fs = require('fs')
-var process = require('process');
-var arrow = require('../lib/arrow.js');
-var program = require('commander');
+var fs = require('fs');
+var Table = require('../dist/Arrow.js').Table;
+var optionList = [
+    {
+        type: String,
+        name: 'schema',
+        alias: 's', multiple: true,
+        typeLabel: '[underline]{columns}',
+        description: 'A space-delimited list of column names'
+    },
+    {
+        type: String,
+        name: 'file', alias: 'f',
+        description: 'The Arrow file to read'
+    }
+];
 
-function list (val) {
-    return val.split(',');
-}
+var argv = require(`command-line-args`)(optionList, { partial: true });
+var files = [argv.file, ...(argv._unknown || [])].filter(Boolean);
 
-program
-  .version('0.1.0')
-  .usage('[options] <file>')
-  .option('-s --schema <list>', 'A comma-separated list of column names', list)
-  .parse(process.argv);
+// console.log(JSON.stringify(argv));
 
-if (!program.schema) {
-    program.outputHelp();
+if (!argv.schema || !files.length) {
+    console.log(require('command-line-usage')([
+        {
+            header: 'arrow2csv',
+            content: 'Print a CSV from an Arrow file'
+        },
+        {
+            header: 'Synopsis',
+            content: [
+                '$ arrow2csv [underline]{file.arrow} [[bold]{--schema} column_name ...]',
+                '$ arrow2csv [[bold]{--schema} column_name ...] [[bold]{--file} [underline]{file.arrow}]',
+                '$ arrow2csv [bold]{-s} column_1 [bold]{-s} column_2 [[bold]{-f} [underline]{file.arrow}]',
+                '$ arrow2csv [[bold]{--help}]'
+            ]
+        },
+        {
+            header: 'Options',
+            optionList: [
+                ...optionList,
+                {
+                    name: 'help',
+                    description: 'Print this usage guide.'
+                }
+            ]
+        },
+        {
+            header: 'Example',
+            content: [
+                '$ arrow2csv --schema foo baz -f simple.arrow',
+                '>  foo,  baz',
+                '>    1,   aa',
+                '> null, null',
+                '>    3, null',
+                '>    4,  bbb',
+                '>    5, cccc',
+            ]
+        }
+    ]));
     process.exit(1);
 }
 
-var buf = fs.readFileSync(process.argv[process.argv.length - 1]);
-var reader = arrow.getReader(buf);
-var nrecords
-
-nrecords = reader.loadNextBatch();
-while (nrecords > 0) {
-  for (var i = 0; i < nrecords; i += 1|0) {
-    console.log(program.schema.map(function (field) {
-      return '' + reader.getVector(field).get(i);
-    }).join(','));
-  }
-  nrecords = reader.loadNextBatch();
-  if (nrecords > 0) console.log('---');
-}
+files.forEach((source) => {
+    var allColumns = Table.from(fs.readFileSync(source));
+    var selectedColumns = new Table(argv.schema.map((columnName) => {
+        return allColumns.getColumn(columnName);
+    }));
+    console.log(selectedColumns.toString());
+});

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/bin/arrow_schema.js
----------------------------------------------------------------------
diff --git a/js/bin/arrow_schema.js b/js/bin/arrow_schema.js
deleted file mode 100755
index 4917628..0000000
--- a/js/bin/arrow_schema.js
+++ /dev/null
@@ -1,29 +0,0 @@
-#! /usr/bin/env node
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-var fs = require('fs');
-var process = require('process');
-var arrow = require('../lib/arrow.js');
-
-var buf = fs.readFileSync(process.argv[process.argv.length - 1]);
-var reader = arrow.getReader(buf);
-console.log(JSON.stringify(reader.getSchema(), null, '\t'));
-//console.log(JSON.stringify(reader.getVectors(), null, '\t'));
-console.log('block count: ' + reader.getBatchCount());
-

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/bower.json
----------------------------------------------------------------------
diff --git a/js/bower.json b/js/bower.json
deleted file mode 100644
index c2099f8..0000000
--- a/js/bower.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "name": "arrow",
-  "description": "",
-  "main": "_bundles/arrow.js",
-  "authors": [],
-  "license": "Apache-2.0",
-  "homepage": "http://arrow.apache.org",
-  "ignore": [
-    ".gitignore",
-    ".npmignore",
-    "src/",
-    "spec/",
-    "tsconfig.json",
-    "webpack.config.js",
-    "flatbuffers.sh"
-  ]
-}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/closure-compiler-scripts/File_generated.js
----------------------------------------------------------------------
diff --git a/js/closure-compiler-scripts/File_generated.js b/js/closure-compiler-scripts/File_generated.js
new file mode 100644
index 0000000..bb82cc4
--- /dev/null
+++ b/js/closure-compiler-scripts/File_generated.js
@@ -0,0 +1,264 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+goog.module("module$targets$es5$cls$format$File_generated");
+goog.module.declareLegacyNamespace();
+var Schema_ = goog.require("module$targets$es5$cls$format$Schema_generated");
+/**
+ * @const
+ * @namespace
+ */
+var org = Schema_.org;
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache = org.apache || {};
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache.arrow = org.apache.arrow || {};
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache.arrow.flatbuf = org.apache.arrow.flatbuf || {};
+
+/**
+ * ----------------------------------------------------------------------
+ * Arrow File metadata
+ *
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Footer = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Footer}
+ */
+org.apache.arrow.flatbuf.Footer.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Footer=} obj
+ * @returns {org.apache.arrow.flatbuf.Footer}
+ */
+org.apache.arrow.flatbuf.Footer.getRootAsFooter = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Footer).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.MetadataVersion}
+ */
+org.apache.arrow.flatbuf.Footer.prototype.version = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.MetadataVersion} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.MetadataVersion.V1;
+};
+
+/**
+ * @param {org.apache.arrow.flatbuf.Schema=} obj
+ * @returns {org.apache.arrow.flatbuf.Schema|null}
+ */
+org.apache.arrow.flatbuf.Footer.prototype.schema = function(obj) {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? (obj || new org.apache.arrow.flatbuf.Schema).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+};
+
+/**
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.Block=} obj
+ * @returns {org.apache.arrow.flatbuf.Block}
+ */
+org.apache.arrow.flatbuf.Footer.prototype.dictionaries = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? (obj || new org.apache.arrow.flatbuf.Block).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Footer.prototype.dictionariesLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.Block=} obj
+ * @returns {org.apache.arrow.flatbuf.Block}
+ */
+org.apache.arrow.flatbuf.Footer.prototype.recordBatches = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 10);
+  return offset ? (obj || new org.apache.arrow.flatbuf.Block).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Footer.prototype.recordBatchesLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 10);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Footer.startFooter = function(builder) {
+  builder.startObject(4);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.MetadataVersion} version
+ */
+org.apache.arrow.flatbuf.Footer.addVersion = function(builder, version) {
+  builder.addFieldInt16(0, version, org.apache.arrow.flatbuf.MetadataVersion.V1);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} schemaOffset
+ */
+org.apache.arrow.flatbuf.Footer.addSchema = function(builder, schemaOffset) {
+  builder.addFieldOffset(1, schemaOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} dictionariesOffset
+ */
+org.apache.arrow.flatbuf.Footer.addDictionaries = function(builder, dictionariesOffset) {
+  builder.addFieldOffset(2, dictionariesOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.Footer.startDictionariesVector = function(builder, numElems) {
+  builder.startVector(24, numElems, 8);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} recordBatchesOffset
+ */
+org.apache.arrow.flatbuf.Footer.addRecordBatches = function(builder, recordBatchesOffset) {
+  builder.addFieldOffset(3, recordBatchesOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.Footer.startRecordBatchesVector = function(builder, numElems) {
+  builder.startVector(24, numElems, 8);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Footer.endFooter = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} offset
+ */
+org.apache.arrow.flatbuf.Footer.finishFooterBuffer = function(builder, offset) {
+  builder.finish(offset);
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Block = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Block}
+ */
+org.apache.arrow.flatbuf.Block.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * Index to the start of the RecordBlock (note this is past the Message header)
+ *
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.Block.prototype.offset = function() {
+  return this.bb.readInt64(this.bb_pos);
+};
+
+/**
+ * Length of the metadata
+ *
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.Block.prototype.metaDataLength = function() {
+  return this.bb.readInt32(this.bb_pos + 8);
+};
+
+/**
+ * Length of the data (this is aligned so there can be a gap between this and
+ * the metadata).
+ *
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.Block.prototype.bodyLength = function() {
+  return this.bb.readInt64(this.bb_pos + 16);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Long} offset
+ * @param {number} metaDataLength
+ * @param {flatbuffers.Long} bodyLength
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Block.createBlock = function(builder, offset, metaDataLength, bodyLength) {
+  builder.prep(8, 24);
+  builder.writeInt64(bodyLength);
+  builder.pad(4);
+  builder.writeInt32(metaDataLength);
+  builder.writeInt64(offset);
+  return builder.offset();
+};
+
+// Exports for Node.js and RequireJS
+exports.org = org;
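
Note: a minimal sketch of how the Footer and Block accessors above might be used to enumerate the blocks recorded in an Arrow file footer. It assumes the org.apache.arrow.flatbuf namespace from this module is in scope and that bb is a flatbuffers.ByteBuffer already positioned at the footer root (the reader code further down, in the old js/src/arrow.ts, derives that position from the trailing footer length); variable names are illustrative only.

// Sketch only: bb is assumed to be a flatbuffers.ByteBuffer positioned at the footer root.
var footer = org.apache.arrow.flatbuf.Footer.getRootAsFooter(bb);
console.log('metadata version:', footer.version());
for (var i = 0, n = footer.recordBatchesLength(); i < n; i++) {
  var block = footer.recordBatches(i);        // 24-byte Block struct
  console.log('record batch', i, {
    offset: block.offset().low,               // offset of the block within the file (low 32 bits)
    metaDataLength: block.metaDataLength(),   // length of the metadata
    bodyLength: block.bodyLength().low        // length of the aligned message body
  });
}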

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/closure-compiler-scripts/Message_generated.js
----------------------------------------------------------------------
diff --git a/js/closure-compiler-scripts/Message_generated.js b/js/closure-compiler-scripts/Message_generated.js
new file mode 100644
index 0000000..0c1a1a9
--- /dev/null
+++ b/js/closure-compiler-scripts/Message_generated.js
@@ -0,0 +1,486 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+goog.module("module$targets$es5$cls$format$Message_generated");
+goog.module.declareLegacyNamespace();
+var Schema_ = goog.require("module$targets$es5$cls$format$Schema_generated");
+/**
+ * @const
+ * @namespace
+ */
+var org = Schema_.org;
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache = org.apache || {};
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache.arrow = org.apache.arrow || {};
+
+/**
+ * @const
+ * @namespace
+ */
+org.apache.arrow.flatbuf = org.apache.arrow.flatbuf || {};
+
+/**
+ * ----------------------------------------------------------------------
+ * The root Message type
+ * This union enables us to easily send different message types without
+ * redundant storage, and in the future we can easily add new message types.
+ *
+ * Arrow implementations do not need to implement all of the message types,
+ * which may include experimental metadata types. For maximum compatibility,
+ * it is best to send data using RecordBatch
+ *
+ * @enum
+ */
+org.apache.arrow.flatbuf.MessageHeader = {
+  NONE: 0, 0: 'NONE',
+  Schema: 1, 1: 'Schema',
+  DictionaryBatch: 2, 2: 'DictionaryBatch',
+  RecordBatch: 3, 3: 'RecordBatch',
+  Tensor: 4, 4: 'Tensor',
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * Data structures for describing a table row batch (a collection of
+ * equal-length Arrow arrays)
+ * Metadata about a field at some level of a nested type tree (but not
+ * its children).
+ *
+ * For example, a List<Int16> with values [[1, 2, 3], null, [4], [5, 6], null]
+ * would have {length: 5, null_count: 2} for its List node, and {length: 6,
+ * null_count: 0} for its Int16 node, as separate FieldNode structs
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.FieldNode = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.FieldNode}
+ */
+org.apache.arrow.flatbuf.FieldNode.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * The number of value slots in the Arrow array at this level of a nested
+ * tree
+ *
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.FieldNode.prototype.length = function() {
+  return this.bb.readInt64(this.bb_pos);
+};
+
+/**
+ * The number of observed nulls. Fields with null_count == 0 may choose not
+ * to write their physical validity bitmap out as a materialized buffer,
+ * instead setting the length of the bitmap buffer to 0.
+ *
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.FieldNode.prototype.nullCount = function() {
+  return this.bb.readInt64(this.bb_pos + 8);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Long} length
+ * @param {flatbuffers.Long} null_count
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.FieldNode.createFieldNode = function(builder, length, null_count) {
+  builder.prep(8, 16);
+  builder.writeInt64(null_count);
+  builder.writeInt64(length);
+  return builder.offset();
+};
+
+/**
+ * A data header describing the shared memory layout of a "record" or "row"
+ * batch. Some systems call this a "row batch" internally and others a "record
+ * batch".
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.RecordBatch = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.RecordBatch}
+ */
+org.apache.arrow.flatbuf.RecordBatch.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.RecordBatch=} obj
+ * @returns {org.apache.arrow.flatbuf.RecordBatch}
+ */
+org.apache.arrow.flatbuf.RecordBatch.getRootAsRecordBatch = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.RecordBatch).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * number of records / rows. The arrays in the batch should all have this
+ * length
+ *
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.RecordBatch.prototype.length = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+};
+
+/**
+ * Nodes correspond to the pre-ordered flattened logical schema
+ *
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.FieldNode=} obj
+ * @returns {org.apache.arrow.flatbuf.FieldNode}
+ */
+org.apache.arrow.flatbuf.RecordBatch.prototype.nodes = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? (obj || new org.apache.arrow.flatbuf.FieldNode).__init(this.bb.__vector(this.bb_pos + offset) + index * 16, this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.RecordBatch.prototype.nodesLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * Buffers correspond to the pre-ordered flattened buffer tree
+ *
+ * The number of buffers appended to this list depends on the schema. For
+ * example, most primitive arrays will have 2 buffers, 1 for the validity
+ * bitmap and 1 for the values. For struct arrays, there will only be a
+ * single buffer for the validity (nulls) bitmap
+ *
+ * @param {number} index
+ * @param {org.apache.arrow.flatbuf.Buffer=} obj
+ * @returns {org.apache.arrow.flatbuf.Buffer}
+ */
+org.apache.arrow.flatbuf.RecordBatch.prototype.buffers = function(index, obj) {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? (obj || new org.apache.arrow.flatbuf.Buffer).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+};
+
+/**
+ * @returns {number}
+ */
+org.apache.arrow.flatbuf.RecordBatch.prototype.buffersLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.RecordBatch.startRecordBatch = function(builder) {
+  builder.startObject(3);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Long} length
+ */
+org.apache.arrow.flatbuf.RecordBatch.addLength = function(builder, length) {
+  builder.addFieldInt64(0, length, builder.createLong(0, 0));
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} nodesOffset
+ */
+org.apache.arrow.flatbuf.RecordBatch.addNodes = function(builder, nodesOffset) {
+  builder.addFieldOffset(1, nodesOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.RecordBatch.startNodesVector = function(builder, numElems) {
+  builder.startVector(16, numElems, 8);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} buffersOffset
+ */
+org.apache.arrow.flatbuf.RecordBatch.addBuffers = function(builder, buffersOffset) {
+  builder.addFieldOffset(2, buffersOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {number} numElems
+ */
+org.apache.arrow.flatbuf.RecordBatch.startBuffersVector = function(builder, numElems) {
+  builder.startVector(24, numElems, 8);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.RecordBatch.endRecordBatch = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * ----------------------------------------------------------------------
+ * For sending dictionary encoding information. Any Field can be
+ * dictionary-encoded, but in this case none of its children may be
+ * dictionary-encoded.
+ * There is one vector / column per dictionary
+ *
+ *
+ * @constructor
+ */
+org.apache.arrow.flatbuf.DictionaryBatch = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.DictionaryBatch}
+ */
+org.apache.arrow.flatbuf.DictionaryBatch.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.DictionaryBatch=} obj
+ * @returns {org.apache.arrow.flatbuf.DictionaryBatch}
+ */
+org.apache.arrow.flatbuf.DictionaryBatch.getRootAsDictionaryBatch = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.DictionaryBatch).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.DictionaryBatch.prototype.id = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+};
+
+/**
+ * @param {org.apache.arrow.flatbuf.RecordBatch=} obj
+ * @returns {org.apache.arrow.flatbuf.RecordBatch|null}
+ */
+org.apache.arrow.flatbuf.DictionaryBatch.prototype.data = function(obj) {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? (obj || new org.apache.arrow.flatbuf.RecordBatch).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.DictionaryBatch.startDictionaryBatch = function(builder) {
+  builder.startObject(2);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Long} id
+ */
+org.apache.arrow.flatbuf.DictionaryBatch.addId = function(builder, id) {
+  builder.addFieldInt64(0, id, builder.createLong(0, 0));
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} dataOffset
+ */
+org.apache.arrow.flatbuf.DictionaryBatch.addData = function(builder, dataOffset) {
+  builder.addFieldOffset(1, dataOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.DictionaryBatch.endDictionaryBatch = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @constructor
+ */
+org.apache.arrow.flatbuf.Message = function() {
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   */
+  this.bb = null;
+
+  /**
+   * @type {number}
+   */
+  this.bb_pos = 0;
+};
+
+/**
+ * @param {number} i
+ * @param {flatbuffers.ByteBuffer} bb
+ * @returns {org.apache.arrow.flatbuf.Message}
+ */
+org.apache.arrow.flatbuf.Message.prototype.__init = function(i, bb) {
+  this.bb_pos = i;
+  this.bb = bb;
+  return this;
+};
+
+/**
+ * @param {flatbuffers.ByteBuffer} bb
+ * @param {org.apache.arrow.flatbuf.Message=} obj
+ * @returns {org.apache.arrow.flatbuf.Message}
+ */
+org.apache.arrow.flatbuf.Message.getRootAsMessage = function(bb, obj) {
+  return (obj || new org.apache.arrow.flatbuf.Message).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.MetadataVersion}
+ */
+org.apache.arrow.flatbuf.Message.prototype.version = function() {
+  var offset = this.bb.__offset(this.bb_pos, 4);
+  return offset ? /** @type {org.apache.arrow.flatbuf.MetadataVersion} */ (this.bb.readInt16(this.bb_pos + offset)) : org.apache.arrow.flatbuf.MetadataVersion.V1;
+};
+
+/**
+ * @returns {org.apache.arrow.flatbuf.MessageHeader}
+ */
+org.apache.arrow.flatbuf.Message.prototype.headerType = function() {
+  var offset = this.bb.__offset(this.bb_pos, 6);
+  return offset ? /** @type {org.apache.arrow.flatbuf.MessageHeader} */ (this.bb.readUint8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.MessageHeader.NONE;
+};
+
+/**
+ * @param {flatbuffers.Table} obj
+ * @returns {?flatbuffers.Table}
+ */
+org.apache.arrow.flatbuf.Message.prototype.header = function(obj) {
+  var offset = this.bb.__offset(this.bb_pos, 8);
+  return offset ? this.bb.__union(obj, this.bb_pos + offset) : null;
+};
+
+/**
+ * @returns {flatbuffers.Long}
+ */
+org.apache.arrow.flatbuf.Message.prototype.bodyLength = function() {
+  var offset = this.bb.__offset(this.bb_pos, 10);
+  return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ */
+org.apache.arrow.flatbuf.Message.startMessage = function(builder) {
+  builder.startObject(4);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.MetadataVersion} version
+ */
+org.apache.arrow.flatbuf.Message.addVersion = function(builder, version) {
+  builder.addFieldInt16(0, version, org.apache.arrow.flatbuf.MetadataVersion.V1);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {org.apache.arrow.flatbuf.MessageHeader} headerType
+ */
+org.apache.arrow.flatbuf.Message.addHeaderType = function(builder, headerType) {
+  builder.addFieldInt8(1, headerType, org.apache.arrow.flatbuf.MessageHeader.NONE);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} headerOffset
+ */
+org.apache.arrow.flatbuf.Message.addHeader = function(builder, headerOffset) {
+  builder.addFieldOffset(2, headerOffset, 0);
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Long} bodyLength
+ */
+org.apache.arrow.flatbuf.Message.addBodyLength = function(builder, bodyLength) {
+  builder.addFieldInt64(3, bodyLength, builder.createLong(0, 0));
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @returns {flatbuffers.Offset}
+ */
+org.apache.arrow.flatbuf.Message.endMessage = function(builder) {
+  var offset = builder.endObject();
+  return offset;
+};
+
+/**
+ * @param {flatbuffers.Builder} builder
+ * @param {flatbuffers.Offset} offset
+ */
+org.apache.arrow.flatbuf.Message.finishMessageBuffer = function(builder, offset) {
+  builder.finish(offset);
+};
+
+// Exports for Node.js and RequireJS
+exports.org = org;
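
Note: a companion sketch for the MessageHeader union above, showing how a message's header type might be inspected when stepping through a stream. It assumes bb is a flatbuffers.ByteBuffer positioned at the root of a Message (the old js/src/arrow.ts below reads a 4-byte length prefix before getting there); names are illustrative only.

// Sketch only: bb is assumed to be positioned at the root of a Message table.
var fb = org.apache.arrow.flatbuf;
var message = fb.Message.getRootAsMessage(bb);
if (message.headerType() === fb.MessageHeader.RecordBatch) {
  var recordBatch = message.header(new fb.RecordBatch());
  console.log('rows:', recordBatch.length().low,
              'field nodes:', recordBatch.nodesLength(),
              'buffers:', recordBatch.buffersLength());
} else if (message.headerType() === fb.MessageHeader.DictionaryBatch) {
  var dictionaryBatch = message.header(new fb.DictionaryBatch());
  console.log('dictionary id:', dictionaryBatch.id().toFloat64());
}
// In either case the body follows the metadata; message.bodyLength() gives its size.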


[4/7] arrow git commit: ARROW-1479: [JS] Expand JavaScript implementation

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/index.js
----------------------------------------------------------------------
diff --git a/js/perf/index.js b/js/perf/index.js
new file mode 100644
index 0000000..669f690
--- /dev/null
+++ b/js/perf/index.js
@@ -0,0 +1,113 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// Use the ES5 UMD target as perf baseline
+// ES6/7 iterators are faster in turbofan, but something about the
+// ES5 transpilation (rewriting let and const to var?) JITs better
+const { Table, readBuffers } = require('../dist/Arrow');
+// const { Table, readBuffers } = require('../targets/es5/cjs');
+// const { Table, readBuffers } = require('../targets/es2015/cjs');
+// const { Table, readBuffers } = require('../targets/esnext/cjs');
+
+const Benchmark = require('benchmark');
+const arrowTestConfigurations = require('./config');
+
+const suites = [];
+
+for (let [name, ...buffers] of arrowTestConfigurations) {
+    const parseSuite = new Benchmark.Suite(`Parse ${name}`, { async: true });
+    const sliceSuite = new Benchmark.Suite(`Slice ${name} vectors`, { async: true });
+    const iterateSuite = new Benchmark.Suite(`Iterate ${name} vectors`, { async: true });
+    const getByIndexSuite = new Benchmark.Suite(`Get ${name} values by index`, { async: true });
+    parseSuite.add(createFromTableTest(name, buffers));
+    parseSuite.add(createReadBuffersTest(name, buffers));
+    for (const vector of Table.from(...buffers).cols()) {
+        sliceSuite.add(createSliceTest(vector));
+        iterateSuite.add(createIterateTest(vector));
+        getByIndexSuite.add(createGetByIndexTest(vector));
+    }
+    suites.push(parseSuite, sliceSuite, getByIndexSuite, iterateSuite);
+}
+
+console.log('Running apache-arrow performance tests...\n');
+
+run();
+
+function run() {
+    var suite = suites.shift();
+    suite && suite.on('complete', function() {
+        console.log(suite.name + ':\n' + this.map(function(x) {
+            var str = x.toString();
+            var meanMsPerOp = Math.round(x.stats.mean * 100000)/100;
+            var sliceOf60FPS = Math.round((meanMsPerOp / (1000/60)) * 100000)/1000;
+            return `${str} (avg: ${meanMsPerOp}ms, or ${sliceOf60FPS}% of a frame @ 60FPS) ${x.suffix || ''}`;
+        }).join('\n') + '\n');
+        if (suites.length > 0) {
+            setTimeout(run, 1000);
+        }
+    })
+    .run({ async: true });
+}
+
+function createFromTableTest(name, buffers) {
+    let table;
+    return {
+        async: true,
+        name: `Table.from`,
+        fn() { table = Table.from(...buffers); }
+    };
+}
+
+function createReadBuffersTest(name, buffers) {
+    let vectors;
+    return {
+        async: true,
+        name: `readBuffers`,
+        fn() { for (vectors of readBuffers(...buffers)) {} }
+    };
+}
+
+function createSliceTest(vector) {
+    let xs;
+    return {
+        async: true,
+        name: `name: '${vector.name}', length: ${vector.length}, type: ${vector.type}`,
+        fn() { xs = vector.slice(); }
+    };
+}
+
+function createIterateTest(vector) {
+    let value;
+    return {
+        async: true,
+        name: `name: '${vector.name}', length: ${vector.length}, type: ${vector.type}`,
+        fn() { for (value of vector) {} }
+    };
+}
+
+function createGetByIndexTest(vector) {
+    let value;
+    return {
+        async: true,
+        name: `name: '${vector.name}', length: ${vector.length}, type: ${vector.type}`,
+        fn() {
+            for (let i = -1, n = vector.length; ++i < n;) {
+                value = vector.get(i);
+            }
+        }
+    };
+}
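
Note: the reporting math in the run() callback above operates on Benchmark.js statistics, where x.stats.mean is the mean seconds per operation; a worked example with a hypothetical mean of 0.5 ms per op:

// Worked example of the formatting above (the mean value is hypothetical).
var mean = 0.0005;                                                          // 0.5 ms per operation, in seconds
var meanMsPerOp = Math.round(mean * 100000) / 100;                          // => 0.5
var sliceOf60FPS = Math.round((meanMsPerOp / (1000 / 60)) * 100000) / 1000; // => 3, i.e. 3% of a 16.67 ms frame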

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/prepublish.sh
----------------------------------------------------------------------
diff --git a/js/prepublish.sh b/js/prepublish.sh
new file mode 100644
index 0000000..4ad8db1
--- /dev/null
+++ b/js/prepublish.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+npm run lint
+npm run build
+npm run test
+preset=`conventional-commits-detector` && echo $preset
+bump=`conventional-recommended-bump -p $preset` && echo $bump
+npm --no-git-tag-version version $bump &>/dev/null
+npm run lerna:publish
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/spec/arrow.js
----------------------------------------------------------------------
diff --git a/js/spec/arrow.js b/js/spec/arrow.js
deleted file mode 100644
index 52c586b..0000000
--- a/js/spec/arrow.js
+++ /dev/null
@@ -1,179 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-var fs = require('fs');
-var chai = require('chai');
-var assert = chai.assert;
-var path= require('path');
-var arrow = require('../lib/arrow.js');
-
-test_files = [
-  {
-    name: 'simple',
-    batches: 1,
-    fields: [
-      {
-        "name": "foo",
-        "type": "Int",
-        "data": [[1, null, 3, 4, 5]]
-      },
-      {
-        "name": "bar",
-        "type": "FloatingPoint",
-        "data": [[1.0, null, null, 4.0, 5.0]]
-      },
-      {
-        "name": "baz",
-        "type": "Utf8",
-        "data": [["aa", null, null, "bbb", "cccc"]]
-      }
-    ]
-  },
-  {
-    name: 'struct_example',
-    batches: 2,
-    fields: [
-      {
-        "name": "struct_nullable",
-        "type": "Struct",
-        "data": [
-          [
-            null,
-            [null, 'MhRNxD4'],
-            [137773603, '3F9HBxK'],
-            [410361374, 'aVd88fp'],
-            null,
-            [null, '3loZrRf'],
-            null
-          ], [
-            null,
-            [null,null],
-            [null,null],
-            null,
-            [null, '78SLiRw'],
-            null,
-            null,
-            [null, '0ilsf82'],
-            [null, 'LjS9MbU'],
-            [null, null],
-          ]
-        ]
-      }
-    ]
-  },
-  {
-    name: 'dictionary',
-    batches: 2,
-    fields: [
-      {
-        "name": "example-csv",
-        "type": "Struct",
-        "data": [
-          [
-            ["Hermione", 25, new Float32Array([-53.235599517822266, 40.231998443603516])],
-            ["Severus", 30, new Float32Array([-62.22999954223633, 3])],
-          ], [
-            ["Harry", 20, new Float32Array([23, -100.23652648925781])]
-          ]
-        ]
-      }
-    ]
-  },
-];
-
-var buf;
-
-function makeSchemaChecks(fields) {
-  describe('schema', function () {
-    var schema;
-    beforeEach(function () {
-      schema = arrow.getSchema(buf);
-    });
-
-    it('should read the number of fields', function () {
-        assert.lengthOf(schema, fields.length);
-    });
-
-    it("should understand fields", function () {
-      for (i = 0; i < fields.length; i += 1|0) {
-          assert.equal(schema[i].name, fields[i].name);
-          assert.equal(schema[i].type, fields[i].type,
-                       'bad type for field ' + schema[i].name);
-      }
-    });
-  });
-}
-
-function makeDataChecks (batches, fields) {
-  describe('data', function() {
-    var reader;
-    beforeEach(function () {
-        reader = arrow.getReader(buf)
-    });
-    it('should read the correct number of record batches', function () {
-        assert.equal(reader.getBatchCount(), batches);
-    });
-    fields.forEach(function (field, i) {
-      it('should read ' + field.type + ' vector ' + field.name, function () {
-        for (var batch_idx = 0; batch_idx < batches; batch_idx += 1|0) {
-          reader.loadNextBatch();
-          var batch = field.data[batch_idx];
-          var vector = reader.getVector(field.name)
-          assert.isDefined(vector, "vector " + field.name);
-          assert.lengthOf(vector, batch.length, "vector " + field.name)
-          for (i = 0; i < vector.length; i += 1|0) {
-            if (field.type == "Date") {
-              assert.equal(vector.get(i).getTime(), batch[i].getTime(),
-                           "vector " + field.name + " index " + i);
-            } else {
-              assert.deepEqual(vector.get(i), batch[i],
-                               "vector " + field.name + " index " + i);
-            }
-          }
-        }
-      });
-    });
-  });
-}
-
-describe('arrow random-access file', function () {
-  test_files.forEach(function (test_file) {
-    describe(test_file.name, function () {
-      var fields = test_file.fields
-      beforeEach(function () {
-        buf = fs.readFileSync(path.resolve(__dirname, test_file.name + '.arrow'));
-      });
-
-      makeSchemaChecks(fields);
-      makeDataChecks(test_file.batches, fields);
-    })
-  });
-});
-
-describe('arrow streaming file format', function () {
-  test_files.forEach(function (test_file) {
-    describe(test_file.name, function () {
-      var fields = test_file.fields
-      beforeEach(function () {
-        buf = fs.readFileSync(path.resolve(__dirname, test_file.name + '-stream.arrow'));
-      });
-
-      makeSchemaChecks(fields);
-      makeDataChecks(test_file.batches, fields);
-    })
-  });
-});

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/spec/dictionary-stream.arrow
----------------------------------------------------------------------
diff --git a/js/spec/dictionary-stream.arrow b/js/spec/dictionary-stream.arrow
deleted file mode 100644
index 17ca48b..0000000
Binary files a/js/spec/dictionary-stream.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/spec/dictionary.arrow
----------------------------------------------------------------------
diff --git a/js/spec/dictionary.arrow b/js/spec/dictionary.arrow
deleted file mode 100644
index 34d41db..0000000
Binary files a/js/spec/dictionary.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/spec/simple-stream.arrow
----------------------------------------------------------------------
diff --git a/js/spec/simple-stream.arrow b/js/spec/simple-stream.arrow
deleted file mode 100644
index 2c68c0e..0000000
Binary files a/js/spec/simple-stream.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/spec/simple.arrow
----------------------------------------------------------------------
diff --git a/js/spec/simple.arrow b/js/spec/simple.arrow
deleted file mode 100644
index 838db6d..0000000
Binary files a/js/spec/simple.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/spec/struct_example-stream.arrow
----------------------------------------------------------------------
diff --git a/js/spec/struct_example-stream.arrow b/js/spec/struct_example-stream.arrow
deleted file mode 100644
index 4e97b70..0000000
Binary files a/js/spec/struct_example-stream.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/spec/struct_example.arrow
----------------------------------------------------------------------
diff --git a/js/spec/struct_example.arrow b/js/spec/struct_example.arrow
deleted file mode 100644
index 3d2c018..0000000
Binary files a/js/spec/struct_example.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/Arrow.externs.ts
----------------------------------------------------------------------
diff --git a/js/src/Arrow.externs.ts b/js/src/Arrow.externs.ts
new file mode 100644
index 0000000..7342684
--- /dev/null
+++ b/js/src/Arrow.externs.ts
@@ -0,0 +1,67 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+/**
+ * @fileoverview Closure Compiler externs for Arrow
+ * @externs
+ * @suppress {duplicate,checkTypes}
+ */
+/** @type {symbol} */
+Symbol.iterator;
+/** @type {symbol} */
+Symbol.asyncIterator;
+let Table = function() {};
+/** @type {?} */
+Table.prototype.rows;
+/** @type {?} */
+Table.prototype.cols;
+/** @type {?} */
+Table.prototype.getRow;
+/** @type {?} */
+Table.prototype.getCell;
+/** @type {?} */
+Table.prototype.getCellAt;
+/** @type {?} */
+Table.prototype.getColumn;
+/** @type {?} */
+Table.prototype.getColumnAt;
+/** @type {?} */
+Table.prototype.toString;
+
+let Vector = function() {};
+/** @type {?} */
+Vector.prototype.length;
+/** @type {?} */
+Vector.prototype.name;
+/** @type {?} */
+Vector.prototype.type;
+/** @type {?} */
+Vector.prototype.props;
+/** @type {?} */
+Vector.prototype.get;
+/** @type {?} */
+Vector.prototype.concat;
+/** @type {?} */
+Vector.prototype.slice;
+
+let TypedVector = function() {};
+/** @type {?} */
+TypedVector.prototype.arrayType;
+
+let ValidityVector = function() {};
+/** @type {?} */
+(<any> ValidityVector).pack;

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/Arrow.internal.ts
----------------------------------------------------------------------
diff --git a/js/src/Arrow.internal.ts b/js/src/Arrow.internal.ts
new file mode 100644
index 0000000..d8f0c37
--- /dev/null
+++ b/js/src/Arrow.internal.ts
@@ -0,0 +1,105 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Vector as Vector_ } from './vector/vector';
+import { StructVector as StructVector_ } from './vector/struct';
+import { DictionaryVector as DictionaryVector_ } from './vector/dictionary';
+import { ListVector as ListVector_, Utf8Vector as Utf8Vector_, FixedSizeListVector as FixedSizeListVector_ } from './vector/list';
+import {
+    TypedVector as TypedVector_, BitVector as BitVector_,
+    DateVector as DateVector_, IndexVector as IndexVector_,
+    Int8Vector as Int8Vector_, Int16Vector as Int16Vector_,
+    Int32Vector as Int32Vector_, Int64Vector as Int64Vector_,
+    Uint8Vector as Uint8Vector_, Uint16Vector as Uint16Vector_,
+    Uint32Vector as Uint32Vector_, Uint64Vector as Uint64Vector_,
+    Float32Vector as Float32Vector_, Float64Vector as Float64Vector_,
+} from './vector/typed';
+
+export const vectors = {
+    Vector: Vector_,
+    BitVector: BitVector_,
+    ListVector: ListVector_,
+    Utf8Vector: Utf8Vector_,
+    DateVector: DateVector_,
+    IndexVector: IndexVector_,
+    TypedVector: TypedVector_,
+    Int8Vector: Int8Vector_,
+    Int16Vector: Int16Vector_,
+    Int32Vector: Int32Vector_,
+    Int64Vector: Int64Vector_,
+    Uint8Vector: Uint8Vector_,
+    Uint16Vector: Uint16Vector_,
+    Uint32Vector: Uint32Vector_,
+    Uint64Vector: Uint64Vector_,
+    Float32Vector: Float32Vector_,
+    Float64Vector: Float64Vector_,
+    StructVector: StructVector_,
+    DictionaryVector: DictionaryVector_,
+    FixedSizeListVector: FixedSizeListVector_,
+};
+
+export namespace vectors {
+    export type Vector<T> =  Vector_<T>;
+    export type BitVector =  BitVector_;
+    export type ListVector<T> =  ListVector_<T>;
+    export type Utf8Vector =  Utf8Vector_;
+    export type DateVector =  DateVector_;
+    export type IndexVector =  IndexVector_;
+    export type Int8Vector =  Int8Vector_;
+    export type Int16Vector =  Int16Vector_;
+    export type Int32Vector =  Int32Vector_;
+    export type Int64Vector =  Int64Vector_;
+    export type Uint8Vector =  Uint8Vector_;
+    export type Uint16Vector =  Uint16Vector_;
+    export type Uint32Vector =  Uint32Vector_;
+    export type Uint64Vector =  Uint64Vector_;
+    export type Float32Vector =  Float32Vector_;
+    export type Float64Vector =  Float64Vector_;
+    export type StructVector =  StructVector_;
+    export type DictionaryVector<T> =  DictionaryVector_<T>;
+    export type FixedSizeListVector<T> =  FixedSizeListVector_<T>;
+    export type TypedVector<T, TArray> =  TypedVector_<T, TArray>;
+}
+
+/* These exports are needed for the closure umd targets */
+try {
+    const Arrow = eval('exports');
+    if (typeof Arrow === 'object') {
+        // string indexers tell closure compiler not to rename these properties
+        Arrow['vectors'] = {};
+        Arrow['vectors']['Vector'] = Vector_;
+        Arrow['vectors']['BitVector'] = BitVector_;
+        Arrow['vectors']['ListVector'] = ListVector_;
+        Arrow['vectors']['Utf8Vector'] = Utf8Vector_;
+        Arrow['vectors']['DateVector'] = DateVector_;
+        Arrow['vectors']['IndexVector'] = IndexVector_;
+        Arrow['vectors']['Int8Vector'] = Int8Vector_;
+        Arrow['vectors']['Int16Vector'] = Int16Vector_;
+        Arrow['vectors']['Int32Vector'] = Int32Vector_;
+        Arrow['vectors']['Int64Vector'] = Int64Vector_;
+        Arrow['vectors']['Uint8Vector'] = Uint8Vector_;
+        Arrow['vectors']['Uint16Vector'] = Uint16Vector_;
+        Arrow['vectors']['Uint32Vector'] = Uint32Vector_;
+        Arrow['vectors']['Uint64Vector'] = Uint64Vector_;
+        Arrow['vectors']['Float32Vector'] = Float32Vector_;
+        Arrow['vectors']['Float64Vector'] = Float64Vector_;
+        Arrow['vectors']['StructVector'] = StructVector_;
+        Arrow['vectors']['DictionaryVector'] = DictionaryVector_;
+        Arrow['vectors']['FixedSizeListVector'] = FixedSizeListVector_;
+    }
+} catch (e) { /* not the UMD bundle */ }
+/** end closure exports */
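
Note on the try/eval block above: looking up exports through eval presumably keeps the reference opaque to the closure compiler when the UMD bundles are built, and the try/catch lets the same source load under targets where exports is not defined. The pattern in isolation (illustrative only, not Arrow-specific):

// The guarded export pattern used above, in isolation (illustrative only).
try {
    const cjsExports = eval('exports');      // looked up at runtime, not rewritten by the optimizer
    if (typeof cjsExports === 'object') {
        cjsExports['answer'] = 42;           // string indexer: the property name survives renaming
    }
} catch (e) { /* `exports` is not defined outside CommonJS/UMD targets */ }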

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/Arrow.ts
----------------------------------------------------------------------
diff --git a/js/src/Arrow.ts b/js/src/Arrow.ts
new file mode 100644
index 0000000..fe19645
--- /dev/null
+++ b/js/src/Arrow.ts
@@ -0,0 +1,31 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Table } from './table';
+import { readBuffers } from './reader/arrow';
+export { Table, readBuffers };
+
+/* These exports are needed for the closure umd targets */
+try {
+    const Arrow = eval('exports');
+    if (typeof Arrow === 'object') {
+        // string indexers tell closure compiler not to rename these properties
+        Arrow['Table'] = Table;
+        Arrow['readBuffers'] = readBuffers;
+    }
+} catch (e) { /* not the UMD bundle */ }
+/** end closure exports */
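
Note: Table and readBuffers are the two entry points exported here; a minimal usage sketch mirroring js/perf/index.js, where fileBytes and streamBytes are placeholders for Uint8Arrays containing Arrow file/stream data:

// Sketch only: fileBytes/streamBytes are placeholders for real Arrow buffers.
const { Table, readBuffers } = require('../dist/Arrow');
const table = Table.from(fileBytes);
console.log(table.toString());               // pretty-prints the table
for (const vector of table.cols()) {
    console.log(vector.name, vector.type, vector.length);
}
for (const vectors of readBuffers(streamBytes)) {
    // each iteration yields the vectors for one record batch
}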

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/Arrow_generated.d.ts
----------------------------------------------------------------------
diff --git a/js/src/Arrow_generated.d.ts b/js/src/Arrow_generated.d.ts
deleted file mode 100644
index 1f5b454..0000000
--- a/js/src/Arrow_generated.d.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export var org: {
-  apache: {
-    arrow: any
-  }
-}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/arrow.ts
----------------------------------------------------------------------
diff --git a/js/src/arrow.ts b/js/src/arrow.ts
deleted file mode 100644
index ac87a8c..0000000
--- a/js/src/arrow.ts
+++ /dev/null
@@ -1,515 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { flatbuffers } from "flatbuffers";
-import { org } from "./Arrow_generated";
-import { Vector, vectorFromField } from "./types";
-
-import ByteBuffer = flatbuffers.ByteBuffer;
-const Footer = org.apache.arrow.flatbuf.Footer;
-const Message = org.apache.arrow.flatbuf.Message;
-const MessageHeader = org.apache.arrow.flatbuf.MessageHeader;
-const RecordBatch = org.apache.arrow.flatbuf.RecordBatch;
-const DictionaryBatch = org.apache.arrow.flatbuf.DictionaryBatch;
-const Schema = org.apache.arrow.flatbuf.Schema;
-const Type = org.apache.arrow.flatbuf.Type;
-const VectorType = org.apache.arrow.flatbuf.VectorType;
-
-export class ArrowReader {
-
-    private bb;
-    private schema: any = [];
-    private vectors: Vector[];
-    private vectorMap: any = {};
-    private dictionaries: any = {};
-    private batches: any = [];
-    private batchIndex: number = 0;
-
-    constructor(bb, schema, vectors: Vector[], batches, dictionaries) {
-        this.bb = bb;
-        this.schema = schema;
-        this.vectors = vectors;
-        for (let i = 0; i < vectors.length; i++) {
-            this.vectorMap[vectors[i].name] = vectors[i];
-        }
-        this.batches = batches;
-        this.dictionaries = dictionaries;
-    }
-
-    public loadNextBatch() {
-        if (this.batchIndex < this.batches.length) {
-            const batch = this.batches[this.batchIndex];
-            this.batchIndex += 1;
-            loadVectors(this.bb, this.vectors, batch);
-            return batch.length;
-        } else {
-            return 0;
-        }
-    }
-
-    public getSchema() {
-        return this.schema;
-    }
-
-    public getVectors() {
-        return this.vectors;
-    }
-
-    public getVector(name) {
-        return this.vectorMap[name];
-    }
-
-    public getBatchCount() {
-        return this.batches.length;
-    }
-
-    // the index of the next batch to be loaded
-    public getBatchIndex() {
-        return this.batchIndex;
-    }
-
-    // set the index of the next batch to be loaded
-    public setBatchIndex(i: number) {
-        this.batchIndex = i;
-    }
-}
-
-export function getSchema(buf) { return getReader(buf).getSchema(); }
-
-export function getReader(buf): ArrowReader {
-    if (_checkMagic(buf, 0)) {
-        return getFileReader(buf);
-    } else {
-        return getStreamReader(buf);
-    }
-}
-
-export function getStreamReader(buf): ArrowReader {
-    const bb = new ByteBuffer(buf);
-
-    const schema = _loadSchema(bb);
-    let field;
-    const vectors: Vector[] = [];
-    let i;
-    let iLen;
-    let batch;
-    const recordBatches = [];
-    const dictionaryBatches = [];
-    const dictionaries = {};
-
-    for (i = 0, iLen = schema.fieldsLength(); i < iLen; i++) {
-        field = schema.fields(i);
-        _createDictionaryVectors(field, dictionaries);
-        vectors.push(vectorFromField(field, dictionaries));
-    }
-
-    while (bb.position() < bb.capacity()) {
-      batch = _loadBatch(bb);
-      if (batch == null) {
-          break;
-      } else if (batch.type === MessageHeader.DictionaryBatch) {
-          dictionaryBatches.push(batch);
-      } else if (batch.type === MessageHeader.RecordBatch) {
-          recordBatches.push(batch);
-      } else {
-          throw new Error("Expected batch type" + MessageHeader.RecordBatch + " or " +
-              MessageHeader.DictionaryBatch + " but got " + batch.type);
-      }
-    }
-
-    // load dictionary vectors
-    for (i = 0; i < dictionaryBatches.length; i++) {
-      batch = dictionaryBatches[i];
-      loadVectors(bb, [dictionaries[batch.id]], batch);
-    }
-
-    return new ArrowReader(bb, parseSchema(schema), vectors, recordBatches, dictionaries);
-}
-
-export function getFileReader(buf): ArrowReader {
-    const bb = new ByteBuffer(buf);
-
-    const footer = _loadFooter(bb);
-
-    const schema = footer.schema();
-    let i;
-    let len;
-    let field;
-    const vectors: Vector[] = [];
-    let block;
-    let batch;
-    const recordBatchBlocks = [];
-    const dictionaryBatchBlocks = [];
-    const dictionaries = {};
-
-    for (i = 0, len = schema.fieldsLength(); i < len; i++) {
-        field = schema.fields(i);
-        _createDictionaryVectors(field, dictionaries);
-        vectors.push(vectorFromField(field, dictionaries));
-    }
-
-    for (i = 0; i < footer.dictionariesLength(); i++) {
-        block = footer.dictionaries(i);
-        dictionaryBatchBlocks.push({
-            bodyLength: block.bodyLength().low,
-            metaDataLength: block.metaDataLength(),
-            offset: block.offset().low,
-        });
-    }
-
-    for (i = 0; i < footer.recordBatchesLength(); i++) {
-        block = footer.recordBatches(i);
-        recordBatchBlocks.push({
-            bodyLength: block.bodyLength().low,
-            metaDataLength: block.metaDataLength(),
-            offset: block.offset().low,
-        });
-    }
-
-    const dictionaryBatches = dictionaryBatchBlocks.map((batchBlock) => {
-        bb.setPosition(batchBlock.offset);
-        // TODO: Make sure this is a dictionary batch
-        return _loadBatch(bb);
-    });
-
-    const recordBatches = recordBatchBlocks.map((batchBlock) => {
-        bb.setPosition(batchBlock.offset);
-        // TODO: Make sure this is a record batch
-        return _loadBatch(bb);
-    });
-
-    // load dictionary vectors
-    for (i = 0; i < dictionaryBatches.length; i++) {
-        batch = dictionaryBatches[i];
-        loadVectors(bb, [dictionaries[batch.id]], batch);
-    }
-
-    return new ArrowReader(bb, parseSchema(schema), vectors, recordBatches, dictionaries);
-}
-
-function _loadFooter(bb) {
-    const fileLength: number = bb.bytes_.length;
-
-    if (fileLength < MAGIC.length * 2 + 4) {
-      throw new Error("file too small " + fileLength);
-    }
-
-    if (!_checkMagic(bb.bytes_, 0)) {
-      throw new Error("missing magic bytes at beginning of file");
-    }
-
-    if (!_checkMagic(bb.bytes_, fileLength - MAGIC.length)) {
-      throw new Error("missing magic bytes at end of file");
-    }
-
-    const footerLengthOffset: number = fileLength - MAGIC.length - 4;
-    bb.setPosition(footerLengthOffset);
-    const footerLength: number = Int32FromByteBuffer(bb, footerLengthOffset);
-
-    if (footerLength <= 0 || footerLength + MAGIC.length * 2 + 4 > fileLength)  {
-      throw new Error("Invalid footer length: " + footerLength);
-    }
-
-    const footerOffset: number = footerLengthOffset - footerLength;
-    bb.setPosition(footerOffset);
-    const footer = Footer.getRootAsFooter(bb);
-
-    return footer;
-}
-
-function _loadSchema(bb) {
-    const message = _loadMessage(bb);
-    if (message.headerType() !== MessageHeader.Schema) {
-        throw new Error("Expected header type " + MessageHeader.Schema + " but got " + message.headerType());
-    }
-    return message.header(new Schema());
-}
-
-function _loadBatch(bb) {
-    const message = _loadMessage(bb);
-    if (message == null) {
-        return;
-    } else if (message.headerType() === MessageHeader.RecordBatch) {
-        const batch = { header: message.header(new RecordBatch()), length: message.bodyLength().low };
-        return _loadRecordBatch(bb, batch);
-    } else if (message.headerType() === MessageHeader.DictionaryBatch) {
-        const batch = { header: message.header(new DictionaryBatch()), length: message.bodyLength().low };
-        return _loadDictionaryBatch(bb, batch);
-    } else {
-        throw new Error("Expected header type " + MessageHeader.RecordBatch + " or " + MessageHeader.DictionaryBatch +
-            " but got " + message.headerType());
-    }
-}
-
-function _loadRecordBatch(bb, batch) {
-    const data = batch.header;
-    let i;
-    const nodesLength = data.nodesLength();
-    const nodes = new Array(nodesLength);
-    let buffer;
-    const buffersLength = data.buffersLength();
-    const buffers = new Array(buffersLength);
-
-    for (i = 0; i < nodesLength; i += 1) {
-        nodes[i] = data.nodes(i);
-    }
-
-    for (i = 0; i < buffersLength; i += 1) {
-        buffer = data.buffers(i);
-        buffers[i] = {
-            length: buffer.length().low,
-            offset: bb.position() + buffer.offset().low,
-        };
-    }
-    // position the buffer after the body to read the next message
-    bb.setPosition(bb.position() + batch.length);
-
-    return { nodes, buffers, length: data.length().low, type: MessageHeader.RecordBatch };
-}
-
-function _loadDictionaryBatch(bb, batch) {
-    const id = batch.header.id().toFloat64().toString();
-    const data = batch.header.data();
-    let i;
-    const nodesLength = data.nodesLength();
-    const nodes = new Array(nodesLength);
-    let buffer;
-    const buffersLength = data.buffersLength();
-    const buffers = new Array(buffersLength);
-
-    for (i = 0; i < nodesLength; i += 1) {
-        nodes[i] = data.nodes(i);
-    }
-    for (i = 0; i < buffersLength; i += 1) {
-        buffer = data.buffers(i);
-        buffers[i] = {
-            length: buffer.length().low,
-            offset: bb.position() + buffer.offset().low,
-        };
-    }
-    // position the buffer after the body to read the next message
-    bb.setPosition(bb.position() + batch.length);
-
-    return {
-        buffers,
-        id,
-        length: data.length().low,
-        nodes,
-        type: MessageHeader.DictionaryBatch,
-    };
-}
-
-function _loadMessage(bb) {
-    const messageLength: number = Int32FromByteBuffer(bb, bb.position());
-    if (messageLength === 0) {
-      return;
-    }
-    bb.setPosition(bb.position() + 4);
-    const message = Message.getRootAsMessage(bb);
-    // position the buffer at the end of the message so it's ready to read further
-    bb.setPosition(bb.position() + messageLength);
-
-    return message;
-}
-
-function _createDictionaryVectors(field, dictionaries) {
-    const encoding = field.dictionary();
-    if (encoding != null) {
-        const id = encoding.id().toFloat64().toString();
-        if (dictionaries[id] == null) {
-            // create a field for the dictionary
-            const dictionaryField = _createDictionaryField(id, field);
-            dictionaries[id] = vectorFromField(dictionaryField, null);
-        }
-    }
-
-    // recursively examine child fields
-    for (let i = 0, len = field.childrenLength(); i < len; i++) {
-        _createDictionaryVectors(field.children(i), dictionaries);
-    }
-}
-
-function _createDictionaryField(id, field) {
-    const builder = new flatbuffers.Builder();
-    const nameOffset = builder.createString("dict-" + id);
-
-    const typeType = field.typeType();
-    let typeOffset;
-    if (typeType === Type.Int) {
-        const type = field.type(new org.apache.arrow.flatbuf.Int());
-        org.apache.arrow.flatbuf.Int.startInt(builder);
-        org.apache.arrow.flatbuf.Int.addBitWidth(builder, type.bitWidth());
-        org.apache.arrow.flatbuf.Int.addIsSigned(builder, type.isSigned());
-        typeOffset = org.apache.arrow.flatbuf.Int.endInt(builder);
-    } else if (typeType === Type.FloatingPoint) {
-        const type = field.type(new org.apache.arrow.flatbuf.FloatingPoint());
-        org.apache.arrow.flatbuf.FloatingPoint.startFloatingPoint(builder);
-        org.apache.arrow.flatbuf.FloatingPoint.addPrecision(builder, type.precision());
-        typeOffset = org.apache.arrow.flatbuf.FloatingPoint.endFloatingPoint(builder);
-    } else if (typeType === Type.Utf8) {
-        org.apache.arrow.flatbuf.Utf8.startUtf8(builder);
-        typeOffset = org.apache.arrow.flatbuf.Utf8.endUtf8(builder);
-    } else if (typeType === Type.Date) {
-        const type = field.type(new org.apache.arrow.flatbuf.Date());
-        org.apache.arrow.flatbuf.Date.startDate(builder);
-        org.apache.arrow.flatbuf.Date.addUnit(builder, type.unit());
-        typeOffset = org.apache.arrow.flatbuf.Date.endDate(builder);
-    } else {
-        throw new Error("Unimplemented dictionary type " + typeType);
-    }
-    if (field.childrenLength() > 0) {
-      throw new Error("Dictionary encoded fields can't have children");
-    }
-    const childrenOffset = org.apache.arrow.flatbuf.Field.createChildrenVector(builder, []);
-
-    let layout;
-    const layoutOffsets = [];
-    for (let i = 0, len = field.layoutLength(); i < len; i++) {
-        layout = field.layout(i);
-        org.apache.arrow.flatbuf.VectorLayout.startVectorLayout(builder);
-        org.apache.arrow.flatbuf.VectorLayout.addBitWidth(builder, layout.bitWidth());
-        org.apache.arrow.flatbuf.VectorLayout.addType(builder, layout.type());
-        layoutOffsets.push(org.apache.arrow.flatbuf.VectorLayout.endVectorLayout(builder));
-    }
-    const layoutOffset = org.apache.arrow.flatbuf.Field.createLayoutVector(builder, layoutOffsets);
-
-    org.apache.arrow.flatbuf.Field.startField(builder);
-    org.apache.arrow.flatbuf.Field.addName(builder, nameOffset);
-    org.apache.arrow.flatbuf.Field.addNullable(builder, field.nullable());
-    org.apache.arrow.flatbuf.Field.addTypeType(builder, typeType);
-    org.apache.arrow.flatbuf.Field.addType(builder, typeOffset);
-    org.apache.arrow.flatbuf.Field.addChildren(builder, childrenOffset);
-    org.apache.arrow.flatbuf.Field.addLayout(builder, layoutOffset);
-    const offset = org.apache.arrow.flatbuf.Field.endField(builder);
-    builder.finish(offset);
-
-    return org.apache.arrow.flatbuf.Field.getRootAsField(builder.bb);
-}
-
-function Int32FromByteBuffer(bb, offset) {
-    return ((bb.bytes_[offset + 3] & 255) << 24) |
-           ((bb.bytes_[offset + 2] & 255) << 16) |
-           ((bb.bytes_[offset + 1] & 255) << 8) |
-           ((bb.bytes_[offset] & 255));
-}
-
-const MAGIC_STR = "ARROW1";
-const MAGIC = new Uint8Array(MAGIC_STR.length);
-for (let i = 0; i < MAGIC_STR.length; i++) {
-    MAGIC[i] = MAGIC_STR.charCodeAt(i);
-}
-
-function _checkMagic(buf, index) {
-    for (let i = 0; i < MAGIC.length; i++) {
-        if (MAGIC[i] !== buf[index + i]) {
-            return false;
-        }
-    }
-    return true;
-}
-
-const TYPEMAP = {};
-TYPEMAP[Type.NONE]          = "NONE";
-TYPEMAP[Type.Null]          = "Null";
-TYPEMAP[Type.Int]           = "Int";
-TYPEMAP[Type.FloatingPoint] = "FloatingPoint";
-TYPEMAP[Type.Binary]        = "Binary";
-TYPEMAP[Type.Utf8]          = "Utf8";
-TYPEMAP[Type.Bool]          = "Bool";
-TYPEMAP[Type.Decimal]       = "Decimal";
-TYPEMAP[Type.Date]          = "Date";
-TYPEMAP[Type.Time]          = "Time";
-TYPEMAP[Type.Timestamp]     = "Timestamp";
-TYPEMAP[Type.Interval]      = "Interval";
-TYPEMAP[Type.List]          = "List";
-TYPEMAP[Type.FixedSizeList] = "FixedSizeList";
-TYPEMAP[Type.Struct_]       = "Struct";
-TYPEMAP[Type.Union]         = "Union";
-
-const VECTORTYPEMAP = {};
-VECTORTYPEMAP[VectorType.OFFSET]   = "OFFSET";
-VECTORTYPEMAP[VectorType.DATA]     = "DATA";
-VECTORTYPEMAP[VectorType.VALIDITY] = "VALIDITY";
-VECTORTYPEMAP[VectorType.TYPE]     = "TYPE";
-
-function parseField(field) {
-    const children = [];
-    for (let i = 0; i < field.childrenLength(); i++) {
-        children.push(parseField(field.children(i)));
-    }
-
-    const layouts = [];
-    for (let i = 0; i < field.layoutLength(); i++) {
-        layouts.push(VECTORTYPEMAP[field.layout(i).type()]);
-    }
-
-    return {
-      children,
-      layout: layouts,
-      name: field.name(),
-      nullable: field.nullable(),
-      type: TYPEMAP[field.typeType()],
-    };
-}
-
-function parseSchema(schema) {
-    const result = [];
-    for (let i = 0, len = schema.fieldsLength(); i < len; i++) {
-        result.push(parseField(schema.fields(i)));
-    }
-    return result;
-}
-
-function loadVectors(bb, vectors: Vector[], recordBatch) {
-    const indices = { bufferIndex: 0, nodeIndex: 0 };
-    for (const vector of vectors) {
-        loadVector(bb, vector, recordBatch, indices);
-    }
-}
-
-/**
- * Loads a vector with data from a batch
- *   recordBatch: { nodes: org.apache.arrow.flatbuf.FieldNode[], buffers: { offset: number, length: number }[] }
- */
-function loadVector(bb, vector: Vector, recordBatch, indices) {
-    const node = recordBatch.nodes[indices.nodeIndex];
-    let ownBuffersLength;
-    const ownBuffers = [];
-    let i;
-    indices.nodeIndex += 1;
-
-    // dictionary vectors are always ints, so will have a data vector plus optional null vector
-    if (vector.field.dictionary() == null) {
-        ownBuffersLength = vector.field.layoutLength();
-    } else if (vector.field.nullable()) {
-        ownBuffersLength = 2;
-    } else {
-        ownBuffersLength = 1;
-    }
-
-    for (i = 0; i < ownBuffersLength; i += 1) {
-        ownBuffers.push(recordBatch.buffers[indices.bufferIndex + i]);
-    }
-    indices.bufferIndex += ownBuffersLength;
-
-    vector.loadData(bb, node, ownBuffers);
-
-    const children = vector.getChildVectors();
-    for (i = 0; i < children.length; i++) {
-        loadVector(bb, children[i], recordBatch, indices);
-    }
-}
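
For context on the loader removed above: record batch metadata arrives as flat,
pre-ordered lists of field nodes and buffers, so the reader advances a pair of
cursors as it recurses through the schema tree. A minimal TypeScript sketch of
that bookkeeping (hypothetical helpers, not part of this commit), following the
same rule loadVector uses for dictionary-encoded fields:

interface BatchIndices { nodeIndex: number; bufferIndex: number; }

// Buffers a field owns at its own level (children are visited separately).
// Dictionary-encoded fields hold index vectors, so they own one data buffer
// plus an optional validity buffer; other fields own one buffer per layout entry.
function ownBufferCount(isDictionaryEncoded: boolean, nullable: boolean, layoutLength: number): number {
    if (!isDictionaryEncoded) { return layoutLength; }
    return nullable ? 2 : 1;
}

// Advance the cursors past one field node and the buffers it owns.
function advance(indices: BatchIndices, ownBuffers: number): void {
    indices.nodeIndex += 1;
    indices.bufferIndex += ownBuffers;
}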

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/bitarray.ts
----------------------------------------------------------------------
diff --git a/js/src/bitarray.ts b/js/src/bitarray.ts
deleted file mode 100644
index 6b0a91a..0000000
--- a/js/src/bitarray.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-export class BitArray {
-    private view: Uint8Array;
-
-    constructor(buffer: ArrayBuffer, offset: number, length: number) {
-        this.view = new Uint8Array(buffer, offset || 0, Math.ceil(length / 8));
-    }
-
-    public get(i) {
-        const index = (i >> 3) | 0; // | 0 converts to an int. Math.floor works too.
-        const bit = i % 8;  // i % 8 is just as fast as i & 7
-        return (this.view[index] & (1 << bit)) !== 0;
-    }
-
-    public set(i) {
-        const index = (i >> 3) | 0;
-        const bit = i % 8;
-        this.view[index] |= 1 << bit;
-    }
-
-    public unset(i) {
-        const index = (i >> 3) | 0;
-        const bit = i % 8;
-        this.view[index] &= ~(1 << bit);
-    }
-}
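
The BitArray removed here wrapped Arrow validity bitmaps, where a set bit i
means value slot i is non-null. A minimal usage sketch (hypothetical helper,
assuming the usual Arrow convention that a cleared bit marks a null):

// Count the null slots covered by a packed validity bitmap.
function countNulls(buffer: ArrayBuffer, byteOffset: number, length: number): number {
    const bits = new BitArray(buffer, byteOffset, length);
    let nulls = 0;
    for (let i = 0; i < length; i++) {
        if (!bits.get(i)) {
            nulls++;
        }
    }
    return nulls;
}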

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/format/File_generated.ts
----------------------------------------------------------------------
diff --git a/js/src/format/File_generated.ts b/js/src/format/File_generated.ts
new file mode 100644
index 0000000..d0b774a
--- /dev/null
+++ b/js/src/format/File_generated.ts
@@ -0,0 +1,240 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+import { flatbuffers } from 'flatbuffers';
+import * as NS16187549871986683199 from './Schema_generated';
+/**
+ * ----------------------------------------------------------------------
+ * Arrow File metadata
+ *
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Footer {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Footer}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Footer {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Footer=} obj
+     * @returns {Footer}
+     */
+    static getRootAsFooter(bb: flatbuffers.ByteBuffer, obj?: Footer): Footer {
+      return (obj || new Footer).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.MetadataVersion}
+     */
+    version(): NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.MetadataVersion} */ (this.bb.readInt16(this.bb_pos + offset)) : NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion.V1;
+    }
+
+    /**
+     * @param {org.apache.arrow.flatbuf.Schema=} obj
+     * @returns {org.apache.arrow.flatbuf.Schema|null}
+     */
+    schema(obj?: NS16187549871986683199.org.apache.arrow.flatbuf.Schema): NS16187549871986683199.org.apache.arrow.flatbuf.Schema | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? (obj || new NS16187549871986683199.org.apache.arrow.flatbuf.Schema).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+    }
+
+    /**
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.Block=} obj
+     * @returns {org.apache.arrow.flatbuf.Block}
+     */
+    dictionaries(index: number, obj?: org.apache.arrow.flatbuf.Block): org.apache.arrow.flatbuf.Block | null {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? (obj || new org.apache.arrow.flatbuf.Block).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    dictionariesLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.Block=} obj
+     * @returns {org.apache.arrow.flatbuf.Block}
+     */
+    recordBatches(index: number, obj?: org.apache.arrow.flatbuf.Block): org.apache.arrow.flatbuf.Block | null {
+      let offset = this.bb.__offset(this.bb_pos, 10);
+      return offset ? (obj || new org.apache.arrow.flatbuf.Block).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    recordBatchesLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 10);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startFooter(builder: flatbuffers.Builder) {
+      builder.startObject(4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.MetadataVersion} version
+     */
+    static addVersion(builder: flatbuffers.Builder, version: NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion) {
+      builder.addFieldInt16(0, version, NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion.V1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} schemaOffset
+     */
+    static addSchema(builder: flatbuffers.Builder, schemaOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, schemaOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} dictionariesOffset
+     */
+    static addDictionaries(builder: flatbuffers.Builder, dictionariesOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(2, dictionariesOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startDictionariesVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(24, numElems, 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} recordBatchesOffset
+     */
+    static addRecordBatches(builder: flatbuffers.Builder, recordBatchesOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(3, recordBatchesOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startRecordBatchesVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(24, numElems, 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endFooter(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} offset
+     */
+    static finishFooterBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
+      builder.finish(offset);
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Block {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Block}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Block {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * Index to the start of the RecordBlock (note this is past the Message header)
+     *
+     * @returns {flatbuffers.Long}
+     */
+    offset(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos);
+    }
+
+    /**
+     * Length of the metadata
+     *
+     * @returns {number}
+     */
+    metaDataLength(): number {
+      return this.bb.readInt32(this.bb_pos + 8);
+    }
+
+    /**
+     * Length of the data (this is aligned so there can be a gap between this and
+     * the metadata).
+     *
+     * @returns {flatbuffers.Long}
+     */
+    bodyLength(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos + 16);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} offset
+     * @param {number} metaDataLength
+     * @param {flatbuffers.Long} bodyLength
+     * @returns {flatbuffers.Offset}
+     */
+    static createBlock(builder: flatbuffers.Builder, offset: flatbuffers.Long, metaDataLength: number, bodyLength: flatbuffers.Long): flatbuffers.Offset {
+      builder.prep(8, 24);
+      builder.writeInt64(bodyLength);
+      builder.pad(4);
+      builder.writeInt32(metaDataLength);
+      builder.writeInt64(offset);
+      return builder.offset();
+    }
+
+  }
+}
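
A minimal sketch of reading the generated Footer above (not part of the commit),
assuming the footer flatbuffer has already been sliced out of an Arrow file and
that the module is imported as shown:

import { flatbuffers } from 'flatbuffers';
import * as File_ from './File_generated';

const fb = File_.org.apache.arrow.flatbuf;

// footerBytes holds just the Footer flatbuffer.
function describeFooter(footerBytes: Uint8Array): void {
    const footer = fb.Footer.getRootAsFooter(new flatbuffers.ByteBuffer(footerBytes));
    console.log('metadata version:', footer.version());
    for (let i = 0, n = footer.recordBatchesLength(); i < n; i++) {
        const block = footer.recordBatches(i)!;
        console.log('record batch @', block.offset().toFloat64(),
                    'metadata bytes:', block.metaDataLength(),
                    'body bytes:', block.bodyLength().toFloat64());
    }
}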

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/src/format/Message_generated.ts
----------------------------------------------------------------------
diff --git a/js/src/format/Message_generated.ts b/js/src/format/Message_generated.ts
new file mode 100644
index 0000000..daa781f
--- /dev/null
+++ b/js/src/format/Message_generated.ts
@@ -0,0 +1,469 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+import { flatbuffers } from 'flatbuffers';
+import * as NS16187549871986683199 from './Schema_generated';
+export namespace org.apache.arrow.flatbuf {
+  export import Schema = NS16187549871986683199.org.apache.arrow.flatbuf.Schema;
+}
+/**
+ * ----------------------------------------------------------------------
+ * The root Message type
+ * This union enables us to easily send different message types without
+ * redundant storage, and in the future we can easily add new message types.
+ *
+ * Arrow implementations do not need to implement all of the message types,
+ * which may include experimental metadata types. For maximum compatibility,
+ * it is best to send data using RecordBatch
+ *
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum MessageHeader {
+    NONE = 0,
+    Schema = 1,
+    DictionaryBatch = 2,
+    RecordBatch = 3,
+    Tensor = 4
+  }
+}
+
+/**
+ * ----------------------------------------------------------------------
+ * Data structures for describing a table row batch (a collection of
+ * equal-length Arrow arrays)
+ * Metadata about a field at some level of a nested type tree (but not
+ * its children).
+ *
+ * For example, a List<Int16> with values [[1, 2, 3], null, [4], [5, 6], null]
+ * would have {length: 5, null_count: 2} for its List node, and {length: 6,
+ * null_count: 0} for its Int16 node, as separate FieldNode structs
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class FieldNode {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {FieldNode}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): FieldNode {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * The number of value slots in the Arrow array at this level of a nested
+     * tree
+     *
+     * @returns {flatbuffers.Long}
+     */
+    length(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos);
+    }
+
+    /**
+     * The number of observed nulls. Fields with null_count == 0 may choose not
+     * to write their physical validity bitmap out as a materialized buffer,
+     * instead setting the length of the bitmap buffer to 0.
+     *
+     * @returns {flatbuffers.Long}
+     */
+    nullCount(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos + 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} length
+     * @param {flatbuffers.Long} null_count
+     * @returns {flatbuffers.Offset}
+     */
+    static createFieldNode(builder: flatbuffers.Builder, length: flatbuffers.Long, null_count: flatbuffers.Long): flatbuffers.Offset {
+      builder.prep(8, 16);
+      builder.writeInt64(null_count);
+      builder.writeInt64(length);
+      return builder.offset();
+    }
+
+  }
+}
+/**
+ * A data header describing the shared memory layout of a "record" or "row"
+ * batch. Some systems call this a "row batch" internally and others a "record
+ * batch".
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class RecordBatch {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {RecordBatch}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): RecordBatch {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {RecordBatch=} obj
+     * @returns {RecordBatch}
+     */
+    static getRootAsRecordBatch(bb: flatbuffers.ByteBuffer, obj?: RecordBatch): RecordBatch {
+      return (obj || new RecordBatch).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * number of records / rows. The arrays in the batch should all have this
+     * length
+     *
+     * @returns {flatbuffers.Long}
+     */
+    length(): flatbuffers.Long {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+    }
+
+    /**
+     * Nodes correspond to the pre-ordered flattened logical schema
+     *
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.FieldNode=} obj
+     * @returns {org.apache.arrow.flatbuf.FieldNode}
+     */
+    nodes(index: number, obj?: org.apache.arrow.flatbuf.FieldNode): org.apache.arrow.flatbuf.FieldNode | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? (obj || new org.apache.arrow.flatbuf.FieldNode).__init(this.bb.__vector(this.bb_pos + offset) + index * 16, this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    nodesLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * Buffers correspond to the pre-ordered flattened buffer tree
+     *
+     * The number of buffers appended to this list depends on the schema. For
+     * example, most primitive arrays will have 2 buffers, 1 for the validity
+     * bitmap and 1 for the values. For struct arrays, there will only be a
+     * single buffer for the validity (nulls) bitmap
+     *
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.Buffer=} obj
+     * @returns {org.apache.arrow.flatbuf.Buffer}
+     */
+    buffers(index: number, obj?: NS16187549871986683199.org.apache.arrow.flatbuf.Buffer): NS16187549871986683199.org.apache.arrow.flatbuf.Buffer | null {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? (obj || new NS16187549871986683199.org.apache.arrow.flatbuf.Buffer).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    buffersLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startRecordBatch(builder: flatbuffers.Builder) {
+      builder.startObject(3);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} length
+     */
+    static addLength(builder: flatbuffers.Builder, length: flatbuffers.Long) {
+      builder.addFieldInt64(0, length, builder.createLong(0, 0));
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} nodesOffset
+     */
+    static addNodes(builder: flatbuffers.Builder, nodesOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, nodesOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startNodesVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(16, numElems, 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} buffersOffset
+     */
+    static addBuffers(builder: flatbuffers.Builder, buffersOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(2, buffersOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startBuffersVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(24, numElems, 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endRecordBatch(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * ----------------------------------------------------------------------
+ * For sending dictionary encoding information. Any Field can be
+ * dictionary-encoded, but in this case none of its children may be
+ * dictionary-encoded.
+ * There is one vector / column per dictionary
+ *
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class DictionaryBatch {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {DictionaryBatch}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): DictionaryBatch {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {DictionaryBatch=} obj
+     * @returns {DictionaryBatch}
+     */
+    static getRootAsDictionaryBatch(bb: flatbuffers.ByteBuffer, obj?: DictionaryBatch): DictionaryBatch {
+      return (obj || new DictionaryBatch).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {flatbuffers.Long}
+     */
+    id(): flatbuffers.Long {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+    }
+
+    /**
+     * @param {org.apache.arrow.flatbuf.RecordBatch=} obj
+     * @returns {org.apache.arrow.flatbuf.RecordBatch|null}
+     */
+    data(obj?: org.apache.arrow.flatbuf.RecordBatch): org.apache.arrow.flatbuf.RecordBatch | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? (obj || new org.apache.arrow.flatbuf.RecordBatch).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startDictionaryBatch(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} id
+     */
+    static addId(builder: flatbuffers.Builder, id: flatbuffers.Long) {
+      builder.addFieldInt64(0, id, builder.createLong(0, 0));
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} dataOffset
+     */
+    static addData(builder: flatbuffers.Builder, dataOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, dataOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endDictionaryBatch(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Message {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Message}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Message {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Message=} obj
+     * @returns {Message}
+     */
+    static getRootAsMessage(bb: flatbuffers.ByteBuffer, obj?: Message): Message {
+      return (obj || new Message).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.MetadataVersion}
+     */
+    version(): NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.MetadataVersion} */ (this.bb.readInt16(this.bb_pos + offset)) : NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion.V1;
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.MessageHeader}
+     */
+    headerType(): org.apache.arrow.flatbuf.MessageHeader {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? /** @type {org.apache.arrow.flatbuf.MessageHeader} */ (this.bb.readUint8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.MessageHeader.NONE;
+    }
+
+    /**
+     * @param {flatbuffers.Table} obj
+     * @returns {?flatbuffers.Table}
+     */
+    header<T extends flatbuffers.Table>(obj: T): T | null {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? this.bb.__union(obj, this.bb_pos + offset) : null;
+    }
+
+    /**
+     * @returns {flatbuffers.Long}
+     */
+    bodyLength(): flatbuffers.Long {
+      let offset = this.bb.__offset(this.bb_pos, 10);
+      return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startMessage(builder: flatbuffers.Builder) {
+      builder.startObject(4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.MetadataVersion} version
+     */
+    static addVersion(builder: flatbuffers.Builder, version: NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion) {
+      builder.addFieldInt16(0, version, NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion.V1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.MessageHeader} headerType
+     */
+    static addHeaderType(builder: flatbuffers.Builder, headerType: org.apache.arrow.flatbuf.MessageHeader) {
+      builder.addFieldInt8(1, headerType, org.apache.arrow.flatbuf.MessageHeader.NONE);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} headerOffset
+     */
+    static addHeader(builder: flatbuffers.Builder, headerOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(2, headerOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} bodyLength
+     */
+    static addBodyLength(builder: flatbuffers.Builder, bodyLength: flatbuffers.Long) {
+      builder.addFieldInt64(3, bodyLength, builder.createLong(0, 0));
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endMessage(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} offset
+     */
+    static finishMessageBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
+      builder.finish(offset);
+    }
+
+  }
+}
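
A minimal sketch of consuming the generated Message union above (not part of the
commit), assuming a single Message flatbuffer has already been isolated:

import { flatbuffers } from 'flatbuffers';
import * as Message_ from './Message_generated';

const fb = Message_.org.apache.arrow.flatbuf;

function readRecordBatchHeader(messageBytes: Uint8Array) {
    const message = fb.Message.getRootAsMessage(new flatbuffers.ByteBuffer(messageBytes));
    if (message.headerType() !== fb.MessageHeader.RecordBatch) {
        throw new Error('expected a RecordBatch message');
    }
    // header() points the passed table at the union value.
    const batch = message.header(new fb.RecordBatch())!;
    return {
        rows: batch.length().toFloat64(),
        nodes: batch.nodesLength(),
        buffers: batch.buffersLength(),
        bodyBytes: message.bodyLength().toFloat64(),
    };
}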


[5/7] arrow git commit: ARROW-1479: [JS] Expand JavaScript implementation

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/closure-compiler-scripts/flatbuffers.js
----------------------------------------------------------------------
diff --git a/js/closure-compiler-scripts/flatbuffers.js b/js/closure-compiler-scripts/flatbuffers.js
new file mode 100644
index 0000000..e51a4a0
--- /dev/null
+++ b/js/closure-compiler-scripts/flatbuffers.js
@@ -0,0 +1,1204 @@
+/**
+ * closure-compiler-friendly flatbuffers
+ * copied from node_modules/flatbuffers/js/flatbuffers.js
+ * update as needed
+ */
+
+ /// @file
+/// @addtogroup flatbuffers_javascript_api
+/// @{
+/// @cond FLATBUFFERS_INTERNAL
+
+goog.module("module$flatbuffers");
+goog.module.declareLegacyNamespace();
+/**
+ * @fileoverview
+ *
+ * Need to suppress 'global this' error so the Node.js export line doesn't cause
+ * closure compile to error out.
+ * @suppress {globalThis}
+ */
+
+/**
+ * @const
+ * @namespace
+ */
+var flatbuffers = {};
+
+/**
+ * @typedef {number}
+ */
+flatbuffers.Offset;
+
+/**
+ * @typedef {{
+ *   bb: flatbuffers.ByteBuffer,
+ *   bb_pos: number
+ * }}
+ */
+flatbuffers.Table;
+
+/**
+ * @type {number}
+ * @const
+ */
+flatbuffers.SIZEOF_SHORT = 2;
+
+/**
+ * @type {number}
+ * @const
+ */
+flatbuffers.SIZEOF_INT = 4;
+
+/**
+ * @type {number}
+ * @const
+ */
+flatbuffers.FILE_IDENTIFIER_LENGTH = 4;
+
+/**
+ * @enum {number}
+ */
+flatbuffers.Encoding = {
+  UTF8_BYTES: 1,
+  UTF16_STRING: 2
+};
+
+/**
+ * @type {Int32Array}
+ * @const
+ */
+flatbuffers.int32 = new Int32Array(2);
+
+/**
+ * @type {Float32Array}
+ * @const
+ */
+flatbuffers.float32 = new Float32Array(flatbuffers.int32.buffer);
+
+/**
+ * @type {Float64Array}
+ * @const
+ */
+flatbuffers.float64 = new Float64Array(flatbuffers.int32.buffer);
+
+/**
+ * @type {boolean}
+ * @const
+ */
+flatbuffers.isLittleEndian = new Uint16Array(new Uint8Array([1, 0]).buffer)[0] === 1;
+
+////////////////////////////////////////////////////////////////////////////////
+
+/**
+ * @constructor
+ * @param {number} low
+ * @param {number} high
+ */
+flatbuffers.Long = function(low, high) {
+  /**
+   * @type {number}
+   * @const
+   */
+  this.low = low | 0;
+
+  /**
+   * @type {number}
+   * @const
+   */
+  this.high = high | 0;
+};
+
+/**
+ * @param {number} low
+ * @param {number} high
+ * @returns {flatbuffers.Long}
+ */
+flatbuffers.Long.create = function(low, high) {
+  // Special-case zero to avoid GC overhead for default values
+  return low == 0 && high == 0 ? flatbuffers.Long.ZERO : new flatbuffers.Long(low, high);
+};
+
+/**
+ * @returns {number}
+ */
+flatbuffers.Long.prototype.toFloat64 = function() {
+  return (this.low >>> 0) + this.high * 0x100000000;
+};
+
+/**
+ * @param {flatbuffers.Long} other
+ * @returns {boolean}
+ */
+flatbuffers.Long.prototype.equals = function(other) {
+  return this.low == other.low && this.high == other.high;
+};
+
+/**
+ * @type {flatbuffers.Long}
+ * @const
+ */
+flatbuffers.Long.ZERO = new flatbuffers.Long(0, 0);
+
+/// @endcond
+////////////////////////////////////////////////////////////////////////////////
+/**
+ * Create a FlatBufferBuilder.
+ *
+ * @constructor
+ * @param {number=} opt_initial_size
+ */
+flatbuffers.Builder = function(opt_initial_size) {
+  if (!opt_initial_size) {
+    var initial_size = 1024;
+  } else {
+    var initial_size = opt_initial_size;
+  }
+
+  /**
+   * @type {flatbuffers.ByteBuffer}
+   * @private
+   */
+  this.bb = flatbuffers.ByteBuffer.allocate(initial_size);
+
+  /**
+   * Remaining space in the ByteBuffer.
+   *
+   * @type {number}
+   * @private
+   */
+  this.space = initial_size;
+
+  /**
+   * Minimum alignment encountered so far.
+   *
+   * @type {number}
+   * @private
+   */
+  this.minalign = 1;
+
+  /**
+   * The vtable for the current table.
+   *
+   * @type {Array.<number>}
+   * @private
+   */
+  this.vtable = null;
+
+  /**
+   * The amount of fields we're actually using.
+   *
+   * @type {number}
+   * @private
+   */
+  this.vtable_in_use = 0;
+
+  /**
+   * Whether we are currently serializing a table.
+   *
+   * @type {boolean}
+   * @private
+   */
+  this.isNested = false;
+
+  /**
+   * Starting offset of the current struct/table.
+   *
+   * @type {number}
+   * @private
+   */
+  this.object_start = 0;
+
+  /**
+   * List of offsets of all vtables.
+   *
+   * @type {Array.<number>}
+   * @private
+   */
+  this.vtables = [];
+
+  /**
+   * For the current vector being built.
+   *
+   * @type {number}
+   * @private
+   */
+  this.vector_num_elems = 0;
+
+  /**
+   * False omits default values from the serialized data
+   *
+   * @type {boolean}
+   * @private
+   */
+  this.force_defaults = false;
+};
+
+/**
+ * In order to save space, fields that are set to their default value
+ * don't get serialized into the buffer. Forcing defaults provides a
+ * way to manually disable this optimization.
+ *
+ * @param {boolean} forceDefaults true always serializes default values
+ */
+flatbuffers.Builder.prototype.forceDefaults = function(forceDefaults) {
+  this.force_defaults = forceDefaults;
+};
+
+/**
+ * Get the ByteBuffer representing the FlatBuffer. Only call this after you've
+ * called finish(). The actual data starts at the ByteBuffer's current position,
+ * not necessarily at 0.
+ *
+ * @returns {flatbuffers.ByteBuffer}
+ */
+flatbuffers.Builder.prototype.dataBuffer = function() {
+  return this.bb;
+};
+
+/**
+ * Get the bytes representing the FlatBuffer. Only call this after you've
+ * called finish().
+ *
+ * @returns {Uint8Array}
+ */
+flatbuffers.Builder.prototype.asUint8Array = function() {
+  return this.bb.bytes().subarray(this.bb.position(), this.bb.position() + this.offset());
+};
+
+/// @cond FLATBUFFERS_INTERNAL
+/**
+ * Prepare to write an element of `size` after `additional_bytes` have been
+ * written, e.g. if you write a string, you need to align such that the int length
+ * field is aligned to 4 bytes, and the string data follows it directly. If all
+ * you need to do is alignment, `additional_bytes` will be 0.
+ *
+ * @param {number} size This is the size of the new element to write
+ * @param {number} additional_bytes The padding size
+ */
+flatbuffers.Builder.prototype.prep = function(size, additional_bytes) {
+  // Track the biggest thing we've ever aligned to.
+  if (size > this.minalign) {
+    this.minalign = size;
+  }
+
+  // Find the amount of alignment needed such that `size` is properly
+  // aligned after `additional_bytes`
+  var align_size = ((~(this.bb.capacity() - this.space + additional_bytes)) + 1) & (size - 1);
+
+  // Reallocate the buffer if needed.
+  while (this.space < align_size + size + additional_bytes) {
+    var old_buf_size = this.bb.capacity();
+    this.bb = flatbuffers.Builder.growByteBuffer(this.bb);
+    this.space += this.bb.capacity() - old_buf_size;
+  }
+
+  this.pad(align_size);
+};
+
+/**
+ * @param {number} byte_size
+ */
+flatbuffers.Builder.prototype.pad = function(byte_size) {
+  for (var i = 0; i < byte_size; i++) {
+    this.bb.writeInt8(--this.space, 0);
+  }
+};
+
+/**
+ * @param {number} value
+ */
+flatbuffers.Builder.prototype.writeInt8 = function(value) {
+  this.bb.writeInt8(this.space -= 1, value);
+};
+
+/**
+ * @param {number} value
+ */
+flatbuffers.Builder.prototype.writeInt16 = function(value) {
+  this.bb.writeInt16(this.space -= 2, value);
+};
+
+/**
+ * @param {number} value
+ */
+flatbuffers.Builder.prototype.writeInt32 = function(value) {
+  this.bb.writeInt32(this.space -= 4, value);
+};
+
+/**
+ * @param {flatbuffers.Long} value
+ */
+flatbuffers.Builder.prototype.writeInt64 = function(value) {
+  this.bb.writeInt64(this.space -= 8, value);
+};
+
+/**
+ * @param {number} value
+ */
+flatbuffers.Builder.prototype.writeFloat32 = function(value) {
+  this.bb.writeFloat32(this.space -= 4, value);
+};
+
+/**
+ * @param {number} value
+ */
+flatbuffers.Builder.prototype.writeFloat64 = function(value) {
+  this.bb.writeFloat64(this.space -= 8, value);
+};
+/// @endcond
+
+/**
+ * Add an `int8` to the buffer, properly aligned, growing the buffer if necessary.
+ * @param {number} value The `int8` to add to the buffer.
+ */
+flatbuffers.Builder.prototype.addInt8 = function(value) {
+  this.prep(1, 0);
+  this.writeInt8(value);
+};
+
+/**
+ * Add an `int16` to the buffer, properly aligned, growing the buffer if necessary.
+ * @param {number} value The `int16` to add to the buffer.
+ */
+flatbuffers.Builder.prototype.addInt16 = function(value) {
+  this.prep(2, 0);
+  this.writeInt16(value);
+};
+
+/**
+ * Add an `int32` to the buffer, properly aligned, growing the buffer if necessary.
+ * @param {number} value The `int32` to add to the buffer.
+ */
+flatbuffers.Builder.prototype.addInt32 = function(value) {
+  this.prep(4, 0);
+  this.writeInt32(value);
+};
+
+/**
+ * Add an `int64` to the buffer, properly aligned, growing the buffer if necessary.
+ * @param {flatbuffers.Long} value The `int64` to add to the buffer.
+ */
+flatbuffers.Builder.prototype.addInt64 = function(value) {
+  this.prep(8, 0);
+  this.writeInt64(value);
+};
+
+/**
+ * Add a `float32` to the buffer, properly aligned, growing the buffer if necessary.
+ * @param {number} value The `float32` to add to the buffer.
+ */
+flatbuffers.Builder.prototype.addFloat32 = function(value) {
+  this.prep(4, 0);
+  this.writeFloat32(value);
+};
+
+/**
+ * Add a `float64` to the buffer, properly aligned, growing the buffer if necessary.
+ * @param {number} value The `float64` to add to the buffer.
+ */
+flatbuffers.Builder.prototype.addFloat64 = function(value) {
+  this.prep(8, 0);
+  this.writeFloat64(value);
+};
+
+/// @cond FLATBUFFERS_INTERNAL
+/**
+ * @param {number} voffset
+ * @param {number} value
+ * @param {number} defaultValue
+ */
+flatbuffers.Builder.prototype.addFieldInt8 = function(voffset, value, defaultValue) {
+  if (this.force_defaults || value != defaultValue) {
+    this.addInt8(value);
+    this.slot(voffset);
+  }
+};
+
+/**
+ * @param {number} voffset
+ * @param {number} value
+ * @param {number} defaultValue
+ */
+flatbuffers.Builder.prototype.addFieldInt16 = function(voffset, value, defaultValue) {
+  if (this.force_defaults || value != defaultValue) {
+    this.addInt16(value);
+    this.slot(voffset);
+  }
+};
+
+/**
+ * @param {number} voffset
+ * @param {number} value
+ * @param {number} defaultValue
+ */
+flatbuffers.Builder.prototype.addFieldInt32 = function(voffset, value, defaultValue) {
+  if (this.force_defaults || value != defaultValue) {
+    this.addInt32(value);
+    this.slot(voffset);
+  }
+};
+
+/**
+ * @param {number} voffset
+ * @param {flatbuffers.Long} value
+ * @param {flatbuffers.Long} defaultValue
+ */
+flatbuffers.Builder.prototype.addFieldInt64 = function(voffset, value, defaultValue) {
+  if (this.force_defaults || !value.equals(defaultValue)) {
+    this.addInt64(value);
+    this.slot(voffset);
+  }
+};
+
+/**
+ * @param {number} voffset
+ * @param {number} value
+ * @param {number} defaultValue
+ */
+flatbuffers.Builder.prototype.addFieldFloat32 = function(voffset, value, defaultValue) {
+  if (this.force_defaults || value != defaultValue) {
+    this.addFloat32(value);
+    this.slot(voffset);
+  }
+};
+
+/**
+ * @param {number} voffset
+ * @param {number} value
+ * @param {number} defaultValue
+ */
+flatbuffers.Builder.prototype.addFieldFloat64 = function(voffset, value, defaultValue) {
+  if (this.force_defaults || value != defaultValue) {
+    this.addFloat64(value);
+    this.slot(voffset);
+  }
+};
+
+/**
+ * @param {number} voffset
+ * @param {flatbuffers.Offset} value
+ * @param {flatbuffers.Offset} defaultValue
+ */
+flatbuffers.Builder.prototype.addFieldOffset = function(voffset, value, defaultValue) {
+  if (this.force_defaults || value != defaultValue) {
+    this.addOffset(value);
+    this.slot(voffset);
+  }
+};
+
+/**
+ * Structs are stored inline, so nothing additional is being added. `defaultValue` is always 0.
+ *
+ * @param {number} voffset
+ * @param {flatbuffers.Offset} value
+ * @param {flatbuffers.Offset} defaultValue
+ */
+flatbuffers.Builder.prototype.addFieldStruct = function(voffset, value, defaultValue) {
+  if (value != defaultValue) {
+    this.nested(value);
+    this.slot(voffset);
+  }
+};
+
+/**
+ * Structures are always stored inline, they need to be created right
+ * where they're used.  You'll get this assertion failure if you
+ * created it elsewhere.
+ *
+ * @param {flatbuffers.Offset} obj The offset of the created object
+ */
+flatbuffers.Builder.prototype.nested = function(obj) {
+  if (obj != this.offset()) {
+    throw new Error('FlatBuffers: struct must be serialized inline.');
+  }
+};
+
+/**
+ * Should not be creating any other object, string or vector
+ * while an object is being constructed
+ */
+flatbuffers.Builder.prototype.notNested = function() {
+  if (this.isNested) {
+    throw new Error('FlatBuffers: object serialization must not be nested.');
+  }
+};
+
+/**
+ * Set the current vtable at `voffset` to the current location in the buffer.
+ *
+ * @param {number} voffset
+ */
+flatbuffers.Builder.prototype.slot = function(voffset) {
+  this.vtable[voffset] = this.offset();
+};
+
+/**
+ * @returns {flatbuffers.Offset} Offset relative to the end of the buffer.
+ */
+flatbuffers.Builder.prototype.offset = function() {
+  return this.bb.capacity() - this.space;
+};
+
+/**
+ * Doubles the size of the backing ByteBuffer and copies the old data towards
+ * the end of the new buffer (since we build the buffer backwards).
+ *
+ * @param {flatbuffers.ByteBuffer} bb The current buffer with the existing data
+ * @returns {flatbuffers.ByteBuffer} A new byte buffer with the old data copied
+ * to it. The data is located at the end of the buffer.
+ *
+ * uint8Array.set() formally takes {Array<number>|ArrayBufferView}, so to pass
+ * it a uint8Array we need to suppress the type check:
+ * @suppress {checkTypes}
+ */
+flatbuffers.Builder.growByteBuffer = function(bb) {
+  var old_buf_size = bb.capacity();
+
+  // Ensure we don't grow beyond what fits in an int.
+  if (old_buf_size & 0xC0000000) {
+    throw new Error('FlatBuffers: cannot grow buffer beyond 2 gigabytes.');
+  }
+
+  var new_buf_size = old_buf_size << 1;
+  var nbb = flatbuffers.ByteBuffer.allocate(new_buf_size);
+  nbb.setPosition(new_buf_size - old_buf_size);
+  nbb.bytes().set(bb.bytes(), new_buf_size - old_buf_size);
+  return nbb;
+};
+/// @endcond
+
+/**
+ * Adds an offset, relative to where it will be written.
+ *
+ * @param {flatbuffers.Offset} offset The offset to add.
+ */
+flatbuffers.Builder.prototype.addOffset = function(offset) {
+  this.prep(flatbuffers.SIZEOF_INT, 0); // Ensure alignment is already done.
+  this.writeInt32(this.offset() - offset + flatbuffers.SIZEOF_INT);
+};
+
+/// @cond FLATBUFFERS_INTERNAL
+/**
+ * Start encoding a new object in the buffer.  Users will not usually need to
+ * call this directly. The FlatBuffers compiler will generate helper methods
+ * that call this method internally.
+ *
+ * @param {number} numfields
+ */
+flatbuffers.Builder.prototype.startObject = function(numfields) {
+  this.notNested();
+  if (this.vtable == null) {
+    this.vtable = [];
+  }
+  this.vtable_in_use = numfields;
+  for (var i = 0; i < numfields; i++) {
+    this.vtable[i] = 0; // This will push additional elements as needed
+  }
+  this.isNested = true;
+  this.object_start = this.offset();
+};
+
+/**
+ * Finish off writing the object that is under construction.
+ *
+ * @returns {flatbuffers.Offset} The offset to the object inside `dataBuffer`
+ */
+flatbuffers.Builder.prototype.endObject = function() {
+  if (this.vtable == null || !this.isNested) {
+    throw new Error('FlatBuffers: endObject called without startObject');
+  }
+
+  this.addInt32(0);
+  var vtableloc = this.offset();
+
+  // Write out the current vtable.
+  for (var i = this.vtable_in_use - 1; i >= 0; i--) {
+    // Offset relative to the start of the table.
+    this.addInt16(this.vtable[i] != 0 ? vtableloc - this.vtable[i] : 0);
+  }
+
+  var standard_fields = 2; // The fields below:
+  this.addInt16(vtableloc - this.object_start);
+  this.addInt16((this.vtable_in_use + standard_fields) * flatbuffers.SIZEOF_SHORT);
+
+  // Search for an existing vtable that matches the current one.
+  var existing_vtable = 0;
+outer_loop:
+  for (var i = 0; i < this.vtables.length; i++) {
+    var vt1 = this.bb.capacity() - this.vtables[i];
+    var vt2 = this.space;
+    var len = this.bb.readInt16(vt1);
+    if (len == this.bb.readInt16(vt2)) {
+      for (var j = flatbuffers.SIZEOF_SHORT; j < len; j += flatbuffers.SIZEOF_SHORT) {
+        if (this.bb.readInt16(vt1 + j) != this.bb.readInt16(vt2 + j)) {
+          continue outer_loop;
+        }
+      }
+      existing_vtable = this.vtables[i];
+      break;
+    }
+  }
+
+  if (existing_vtable) {
+    // Found a match:
+    // Remove the current vtable.
+    this.space = this.bb.capacity() - vtableloc;
+
+    // Point table to existing vtable.
+    this.bb.writeInt32(this.space, existing_vtable - vtableloc);
+  } else {
+    // No match:
+    // Add the location of the current vtable to the list of vtables.
+    this.vtables.push(this.offset());
+
+    // Point table to current vtable.
+    this.bb.writeInt32(this.bb.capacity() - vtableloc, this.offset() - vtableloc);
+  }
+
+  this.isNested = false;
+  return vtableloc;
+};
+/// @endcond
+
+/**
+ * Finalize a buffer, pointing to the given `root_table`.
+ *
+ * @param {flatbuffers.Offset} root_table
+ * @param {string=} opt_file_identifier
+ */
+flatbuffers.Builder.prototype.finish = function(root_table, opt_file_identifier) {
+  if (opt_file_identifier) {
+    var file_identifier = opt_file_identifier;
+    this.prep(this.minalign, flatbuffers.SIZEOF_INT +
+      flatbuffers.FILE_IDENTIFIER_LENGTH);
+    if (file_identifier.length != flatbuffers.FILE_IDENTIFIER_LENGTH) {
+      throw new Error('FlatBuffers: file identifier must be length ' +
+        flatbuffers.FILE_IDENTIFIER_LENGTH);
+    }
+    for (var i = flatbuffers.FILE_IDENTIFIER_LENGTH - 1; i >= 0; i--) {
+      this.writeInt8(file_identifier.charCodeAt(i));
+    }
+  }
+  this.prep(this.minalign, flatbuffers.SIZEOF_INT);
+  this.addOffset(root_table);
+  this.bb.setPosition(this.space);
+};
+
+/// @cond FLATBUFFERS_INTERNAL
+/**
+ * This checks that a required field has been set in a given table that has
+ * just been constructed.
+ *
+ * @param {flatbuffers.Offset} table
+ * @param {number} field
+ */
+flatbuffers.Builder.prototype.requiredField = function(table, field) {
+  var table_start = this.bb.capacity() - table;
+  var vtable_start = table_start - this.bb.readInt32(table_start);
+  var ok = this.bb.readInt16(vtable_start + field) != 0;
+
+  // If this fails, the caller will show what field needs to be set.
+  if (!ok) {
+    throw new Error('FlatBuffers: field ' + field + ' must be set');
+  }
+};
+
+/**
+ * Start a new array/vector of objects.  Users usually will not call
+ * this directly. The FlatBuffers compiler will create a start/end
+ * method for vector types in generated code.
+ *
+ * @param {number} elem_size The size of each element in the array
+ * @param {number} num_elems The number of elements in the array
+ * @param {number} alignment The alignment of the array
+ */
+flatbuffers.Builder.prototype.startVector = function(elem_size, num_elems, alignment) {
+  this.notNested();
+  this.vector_num_elems = num_elems;
+  this.prep(flatbuffers.SIZEOF_INT, elem_size * num_elems);
+  this.prep(alignment, elem_size * num_elems); // Just in case alignment > int.
+};
+
+/**
+ * Finish off the creation of an array and all its elements. The array must be
+ * created with `startVector`.
+ *
+ * @returns {flatbuffers.Offset} The offset at which the newly created array
+ * starts.
+ */
+flatbuffers.Builder.prototype.endVector = function() {
+  this.writeInt32(this.vector_num_elems);
+  return this.offset();
+};
+/// @endcond
+
+/**
+ * Encode the string `s` in the buffer using UTF-8. If a Uint8Array is passed
+ * instead of a string, it is assumed to contain valid UTF-8 encoded data.
+ *
+ * @param {string|Uint8Array} s The string to encode
+ * @return {flatbuffers.Offset} The offset in the buffer where the encoded string starts
+ */
+flatbuffers.Builder.prototype.createString = function(s) {
+  if (s instanceof Uint8Array) {
+    var utf8 = s;
+  } else {
+    var utf8 = [];
+    var i = 0;
+
+    while (i < s.length) {
+      var codePoint;
+
+      // Decode UTF-16
+      var a = s.charCodeAt(i++);
+      if (a < 0xD800 || a >= 0xDC00) {
+        codePoint = a;
+      } else {
+        var b = s.charCodeAt(i++);
+        codePoint = (a << 10) + b + (0x10000 - (0xD800 << 10) - 0xDC00);
+      }
+
+      // Encode UTF-8
+      if (codePoint < 0x80) {
+        utf8.push(codePoint);
+      } else {
+        if (codePoint < 0x800) {
+          utf8.push(((codePoint >> 6) & 0x1F) | 0xC0);
+        } else {
+          if (codePoint < 0x10000) {
+            utf8.push(((codePoint >> 12) & 0x0F) | 0xE0);
+          } else {
+            utf8.push(
+              ((codePoint >> 18) & 0x07) | 0xF0,
+              ((codePoint >> 12) & 0x3F) | 0x80);
+          }
+          utf8.push(((codePoint >> 6) & 0x3F) | 0x80);
+        }
+        utf8.push((codePoint & 0x3F) | 0x80);
+      }
+    }
+  }
+
+  this.addInt8(0);
+  this.startVector(1, utf8.length, 1);
+  this.bb.setPosition(this.space -= utf8.length);
+  for (var i = 0, offset = this.space, bytes = this.bb.bytes(); i < utf8.length; i++) {
+    bytes[offset++] = utf8[i];
+  }
+  return this.endVector();
+};
+
+/**
+ * A helper function to avoid generated code depending on this file directly.
+ *
+ * @param {number} low
+ * @param {number} high
+ * @returns {flatbuffers.Long}
+ */
+flatbuffers.Builder.prototype.createLong = function(low, high) {
+  return flatbuffers.Long.create(low, high);
+};
+////////////////////////////////////////////////////////////////////////////////
+/// @cond FLATBUFFERS_INTERNAL
+/**
+ * Create a new ByteBuffer with a given array of bytes (`Uint8Array`).
+ *
+ * @constructor
+ * @param {Uint8Array} bytes
+ */
+flatbuffers.ByteBuffer = function(bytes) {
+  /**
+   * @type {Uint8Array}
+   * @private
+   */
+  this.bytes_ = bytes;
+
+  /**
+   * @type {number}
+   * @private
+   */
+  this.position_ = 0;
+};
+
+/**
+ * Create and allocate a new ByteBuffer with a given size.
+ *
+ * @param {number} byte_size
+ * @returns {flatbuffers.ByteBuffer}
+ */
+flatbuffers.ByteBuffer.allocate = function(byte_size) {
+  return new flatbuffers.ByteBuffer(new Uint8Array(byte_size));
+};
+
+/**
+ * Get the underlying `Uint8Array`.
+ *
+ * @returns {Uint8Array}
+ */
+flatbuffers.ByteBuffer.prototype.bytes = function() {
+  return this.bytes_;
+};
+
+/**
+ * Get the buffer's position.
+ *
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.position = function() {
+  return this.position_;
+};
+
+/**
+ * Set the buffer's position.
+ *
+ * @param {number} position
+ */
+flatbuffers.ByteBuffer.prototype.setPosition = function(position) {
+  this.position_ = position;
+};
+
+/**
+ * Get the buffer's capacity.
+ *
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.capacity = function() {
+  return this.bytes_.length;
+};
+
+/**
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.readInt8 = function(offset) {
+  return this.readUint8(offset) << 24 >> 24;
+};
+
+/**
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.readUint8 = function(offset) {
+  return this.bytes_[offset];
+};
+
+/**
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.readInt16 = function(offset) {
+  return this.readUint16(offset) << 16 >> 16;
+};
+
+/**
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.readUint16 = function(offset) {
+  return this.bytes_[offset] | this.bytes_[offset + 1] << 8;
+};
+
+/**
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.readInt32 = function(offset) {
+  return this.bytes_[offset] | this.bytes_[offset + 1] << 8 | this.bytes_[offset + 2] << 16 | this.bytes_[offset + 3] << 24;
+};
+
+/**
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.readUint32 = function(offset) {
+  return this.readInt32(offset) >>> 0;
+};
+
+/**
+ * @param {number} offset
+ * @returns {flatbuffers.Long}
+ */
+flatbuffers.ByteBuffer.prototype.readInt64 = function(offset) {
+  return new flatbuffers.Long(this.readInt32(offset), this.readInt32(offset + 4));
+};
+
+/**
+ * @param {number} offset
+ * @returns {flatbuffers.Long}
+ */
+flatbuffers.ByteBuffer.prototype.readUint64 = function(offset) {
+  return new flatbuffers.Long(this.readUint32(offset), this.readUint32(offset + 4));
+};
+
+/**
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.readFloat32 = function(offset) {
+  flatbuffers.int32[0] = this.readInt32(offset);
+  return flatbuffers.float32[0];
+};
+
+/**
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.readFloat64 = function(offset) {
+  flatbuffers.int32[flatbuffers.isLittleEndian ? 0 : 1] = this.readInt32(offset);
+  flatbuffers.int32[flatbuffers.isLittleEndian ? 1 : 0] = this.readInt32(offset + 4);
+  return flatbuffers.float64[0];
+};
+
+/**
+ * @param {number} offset
+ * @param {number|boolean} value
+ */
+flatbuffers.ByteBuffer.prototype.writeInt8 = function(offset, value) {
+  this.bytes_[offset] = /** @type {number} */(value);
+};
+
+/**
+ * @param {number} offset
+ * @param {number} value
+ */
+flatbuffers.ByteBuffer.prototype.writeUint8 = function(offset, value) {
+  this.bytes_[offset] = value;
+};
+
+/**
+ * @param {number} offset
+ * @param {number} value
+ */
+flatbuffers.ByteBuffer.prototype.writeInt16 = function(offset, value) {
+  this.bytes_[offset] = value;
+  this.bytes_[offset + 1] = value >> 8;
+};
+
+/**
+ * @param {number} offset
+ * @param {number} value
+ */
+flatbuffers.ByteBuffer.prototype.writeUint16 = function(offset, value) {
+    this.bytes_[offset] = value;
+    this.bytes_[offset + 1] = value >> 8;
+};
+
+/**
+ * @param {number} offset
+ * @param {number} value
+ */
+flatbuffers.ByteBuffer.prototype.writeInt32 = function(offset, value) {
+  this.bytes_[offset] = value;
+  this.bytes_[offset + 1] = value >> 8;
+  this.bytes_[offset + 2] = value >> 16;
+  this.bytes_[offset + 3] = value >> 24;
+};
+
+/**
+ * @param {number} offset
+ * @param {number} value
+ */
+flatbuffers.ByteBuffer.prototype.writeUint32 = function(offset, value) {
+    this.bytes_[offset] = value;
+    this.bytes_[offset + 1] = value >> 8;
+    this.bytes_[offset + 2] = value >> 16;
+    this.bytes_[offset + 3] = value >> 24;
+};
+
+/**
+ * @param {number} offset
+ * @param {flatbuffers.Long} value
+ */
+flatbuffers.ByteBuffer.prototype.writeInt64 = function(offset, value) {
+  this.writeInt32(offset, value.low);
+  this.writeInt32(offset + 4, value.high);
+};
+
+/**
+ * @param {number} offset
+ * @param {flatbuffers.Long} value
+ */
+flatbuffers.ByteBuffer.prototype.writeUint64 = function(offset, value) {
+    this.writeUint32(offset, value.low);
+    this.writeUint32(offset + 4, value.high);
+};
+
+/**
+ * @param {number} offset
+ * @param {number} value
+ */
+flatbuffers.ByteBuffer.prototype.writeFloat32 = function(offset, value) {
+  flatbuffers.float32[0] = value;
+  this.writeInt32(offset, flatbuffers.int32[0]);
+};
+
+/**
+ * @param {number} offset
+ * @param {number} value
+ */
+flatbuffers.ByteBuffer.prototype.writeFloat64 = function(offset, value) {
+  flatbuffers.float64[0] = value;
+  this.writeInt32(offset, flatbuffers.int32[flatbuffers.isLittleEndian ? 0 : 1]);
+  this.writeInt32(offset + 4, flatbuffers.int32[flatbuffers.isLittleEndian ? 1 : 0]);
+};
+
+/**
+ * Look up a field in the vtable, return an offset into the object, or 0 if the
+ * field is not present.
+ *
+ * @param {number} bb_pos
+ * @param {number} vtable_offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.__offset = function(bb_pos, vtable_offset) {
+  var vtable = bb_pos - this.readInt32(bb_pos);
+  return vtable_offset < this.readInt16(vtable) ? this.readInt16(vtable + vtable_offset) : 0;
+};
+
+/**
+ * Initialize any Table-derived type to point to the union at the given offset.
+ *
+ * @param {flatbuffers.Table} t
+ * @param {number} offset
+ * @returns {flatbuffers.Table}
+ */
+flatbuffers.ByteBuffer.prototype.__union = function(t, offset) {
+  t.bb_pos = offset + this.readInt32(offset);
+  t.bb = this;
+  return t;
+};
+
+/**
+ * Create a JavaScript string from UTF-8 data stored inside the FlatBuffer.
+ * This allocates a new string and converts to wide chars upon each access.
+ *
+ * To avoid the conversion to UTF-16, pass flatbuffers.Encoding.UTF8_BYTES as
+ * the "optionalEncoding" argument. This is useful for avoiding conversion to
+ * and from UTF-16 when the data will just be packaged back up in another
+ * FlatBuffer later on.
+ *
+ * @param {number} offset
+ * @param {flatbuffers.Encoding=} opt_encoding Defaults to UTF16_STRING
+ * @returns {string|Uint8Array}
+ */
+flatbuffers.ByteBuffer.prototype.__string = function(offset, opt_encoding) {
+  offset += this.readInt32(offset);
+
+  var length = this.readInt32(offset);
+  var result = '';
+  var i = 0;
+
+  offset += flatbuffers.SIZEOF_INT;
+
+  if (opt_encoding === flatbuffers.Encoding.UTF8_BYTES) {
+    return this.bytes_.subarray(offset, offset + length);
+  }
+
+  while (i < length) {
+    var codePoint;
+
+    // Decode UTF-8
+    var a = this.readUint8(offset + i++);
+    if (a < 0xC0) {
+      codePoint = a;
+    } else {
+      var b = this.readUint8(offset + i++);
+      if (a < 0xE0) {
+        codePoint =
+          ((a & 0x1F) << 6) |
+          (b & 0x3F);
+      } else {
+        var c = this.readUint8(offset + i++);
+        if (a < 0xF0) {
+          codePoint =
+            ((a & 0x0F) << 12) |
+            ((b & 0x3F) << 6) |
+            (c & 0x3F);
+        } else {
+          var d = this.readUint8(offset + i++);
+          codePoint =
+            ((a & 0x07) << 18) |
+            ((b & 0x3F) << 12) |
+            ((c & 0x3F) << 6) |
+            (d & 0x3F);
+        }
+      }
+    }
+
+    // Encode UTF-16
+    if (codePoint < 0x10000) {
+      result += String.fromCharCode(codePoint);
+    } else {
+      codePoint -= 0x10000;
+      result += String.fromCharCode(
+        (codePoint >> 10) + 0xD800,
+        (codePoint & ((1 << 10) - 1)) + 0xDC00);
+    }
+  }
+
+  return result;
+};
+
+/**
+ * Retrieve the relative offset stored at "offset"
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.__indirect = function(offset) {
+  return offset + this.readInt32(offset);
+};
+
+/**
+ * Get the start of data of a vector whose offset is stored at "offset" in this object.
+ *
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.__vector = function(offset) {
+  return offset + this.readInt32(offset) + flatbuffers.SIZEOF_INT; // data starts after the length
+};
+
+/**
+ * Get the length of a vector whose offset is stored at "offset" in this object.
+ *
+ * @param {number} offset
+ * @returns {number}
+ */
+flatbuffers.ByteBuffer.prototype.__vector_len = function(offset) {
+  return this.readInt32(offset + this.readInt32(offset));
+};
+
+/**
+ * @param {string} ident
+ * @returns {boolean}
+ */
+flatbuffers.ByteBuffer.prototype.__has_identifier = function(ident) {
+  if (ident.length != flatbuffers.FILE_IDENTIFIER_LENGTH) {
+    throw new Error('FlatBuffers: file identifier must be length ' +
+                    flatbuffers.FILE_IDENTIFIER_LENGTH);
+  }
+  for (var i = 0; i < flatbuffers.FILE_IDENTIFIER_LENGTH; i++) {
+    if (ident.charCodeAt(i) != this.readInt8(this.position_ + flatbuffers.SIZEOF_INT + i)) {
+      return false;
+    }
+  }
+  return true;
+};
+
+/**
+ * A helper function to avoid generated code depending on this file directly.
+ *
+ * @param {number} low
+ * @param {number} high
+ * @returns {flatbuffers.Long}
+ */
+flatbuffers.ByteBuffer.prototype.createLong = function(low, high) {
+  return flatbuffers.Long.create(low, high);
+};
+
+// Exports for Node.js and RequireJS
+exports.flatbuffers = flatbuffers;
+
+/// @endcond
+/// @}
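
Note: the ByteBuffer write helpers above store multi-byte values little-endian, one
byte at a time. A minimal standalone sketch of the same byte layout (the helper name
here is hypothetical, not part of the generated file):

  // Mirrors ByteBuffer.prototype.writeInt32: lowest byte is written first.
  function writeInt32LE(bytes, offset, value) {
    bytes[offset] = value;
    bytes[offset + 1] = value >> 8;
    bytes[offset + 2] = value >> 16;
    bytes[offset + 3] = value >> 24;
  }
  var buf = new Uint8Array(4);
  writeInt32LE(buf, 0, 0x12345678);
  // buf is now [0x78, 0x56, 0x34, 0x12]

The 64-bit variants write a flatbuffers.Long as two such 32-bit words (low, then high).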

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/closure-compiler-scripts/text-encoding.js
----------------------------------------------------------------------
diff --git a/js/closure-compiler-scripts/text-encoding.js b/js/closure-compiler-scripts/text-encoding.js
new file mode 100644
index 0000000..ca9154f
--- /dev/null
+++ b/js/closure-compiler-scripts/text-encoding.js
@@ -0,0 +1,648 @@
+/**
+ * closure-compiler-friendly text-encoding-utf-8
+ * copied from node_modules/text-encoding-utf-8/lib/encoding.cjs.js
+ * update as needed
+ */
+
+// This is free and unencumbered software released into the public domain.
+// See LICENSE.md for more information.
+
+//
+// Utilities
+//
+
+goog.module("module$text_encoding");
+goog.module.declareLegacyNamespace();
+/**
+ * @param {number} a The number to test.
+ * @param {number} min The minimum value in the range, inclusive.
+ * @param {number} max The maximum value in the range, inclusive.
+ * @return {boolean} True if a >= min and a <= max.
+ */
+function inRange(a, min, max) {
+  return min <= a && a <= max;
+}
+
+/**
+ * @param {*} o
+ * @return {Object}
+ */
+function ToDictionary(o) {
+  if (o === undefined) return {};
+  if (o === Object(o)) return o;
+  throw TypeError('Could not convert argument to dictionary');
+}
+
+/**
+ * @param {string} string Input string of UTF-16 code units.
+ * @return {!Array.<number>} Code points.
+ */
+function stringToCodePoints(string) {
+  // https://heycam.github.io/webidl/#dfn-obtain-unicode
+
+  // 1. Let S be the DOMString value.
+  var s = String(string);
+
+  // 2. Let n be the length of S.
+  var n = s.length;
+
+  // 3. Initialize i to 0.
+  var i = 0;
+
+  // 4. Initialize U to be an empty sequence of Unicode characters.
+  var u = [];
+
+  // 5. While i < n:
+  while (i < n) {
+
+    // 1. Let c be the code unit in S at index i.
+    var c = s.charCodeAt(i);
+
+    // 2. Depending on the value of c:
+
+    // c < 0xD800 or c > 0xDFFF
+    if (c < 0xD800 || c > 0xDFFF) {
+      // Append to U the Unicode character with code point c.
+      u.push(c);
+    }
+
+    // 0xDC00 ≤ c ≤ 0xDFFF
+    else if (0xDC00 <= c && c <= 0xDFFF) {
+      // Append to U a U+FFFD REPLACEMENT CHARACTER.
+      u.push(0xFFFD);
+    }
+
+    // 0xD800 ≤ c ≤ 0xDBFF
+    else if (0xD800 <= c && c <= 0xDBFF) {
+      // 1. If i = n−1, then append to U a U+FFFD REPLACEMENT
+      // CHARACTER.
+      if (i === n - 1) {
+        u.push(0xFFFD);
+      }
+      // 2. Otherwise, i < n−1:
+      else {
+        // 1. Let d be the code unit in S at index i+1.
+        var d = string.charCodeAt(i + 1);
+
+        // 2. If 0xDC00 ≤ d ≤ 0xDFFF, then:
+        if (0xDC00 <= d && d <= 0xDFFF) {
+          // 1. Let a be c & 0x3FF.
+          var a = c & 0x3FF;
+
+          // 2. Let b be d & 0x3FF.
+          var b = d & 0x3FF;
+
+          // 3. Append to U the Unicode character with code point
+          // 2^16+2^10*a+b.
+          u.push(0x10000 + (a << 10) + b);
+
+          // 4. Set i to i+1.
+          i += 1;
+        }
+
+        // 3. Otherwise, d < 0xDC00 or d > 0xDFFF. Append to U a
+        // U+FFFD REPLACEMENT CHARACTER.
+        else  {
+          u.push(0xFFFD);
+        }
+      }
+    }
+
+    // 3. Set i to i+1.
+    i += 1;
+  }
+
+  // 6. Return U.
+  return u;
+}
+
+/**
+ * @param {!Array.<number>} code_points Array of code points.
+ * @return {string} string String of UTF-16 code units.
+ */
+function codePointsToString(code_points) {
+  var s = '';
+  for (var i = 0; i < code_points.length; ++i) {
+    var cp = code_points[i];
+    if (cp <= 0xFFFF) {
+      s += String.fromCharCode(cp);
+    } else {
+      cp -= 0x10000;
+      s += String.fromCharCode((cp >> 10) + 0xD800,
+                               (cp & 0x3FF) + 0xDC00);
+    }
+  }
+  return s;
+}
+
+
+//
+// Implementation of Encoding specification
+// https://encoding.spec.whatwg.org/
+//
+
+//
+// 3. Terminology
+//
+
+/**
+ * End-of-stream is a special token that signifies no more tokens
+ * are in the stream.
+ * @const
+ */ var end_of_stream = -1;
+
+/**
+ * A stream represents an ordered sequence of tokens.
+ *
+ * @constructor
+ * @param {!(Array.<number>|Uint8Array)} tokens Array of tokens that provide the
+ * stream.
+ */
+function Stream(tokens) {
+  /** @type {!Array.<number>} */
+  this.tokens = [].slice.call(tokens);
+}
+
+Stream.prototype = {
+  /**
+   * @return {boolean} True if end-of-stream has been hit.
+   */
+  endOfStream: function() {
+    return !this.tokens.length;
+  },
+
+  /**
+   * When a token is read from a stream, the first token in the
+   * stream must be returned and subsequently removed, and
+   * end-of-stream must be returned otherwise.
+   *
+   * @return {number} Get the next token from the stream, or
+   * end_of_stream.
+   */
+   read: function() {
+    if (!this.tokens.length)
+      return end_of_stream;
+     return this.tokens.shift();
+   },
+
+  /**
+   * When one or more tokens are prepended to a stream, those tokens
+   * must be inserted, in given order, before the first token in the
+   * stream.
+   *
+   * @param {(number|!Array.<number>)} token The token(s) to prepend to the stream.
+   */
+  prepend: function(token) {
+    if (Array.isArray(token)) {
+      var tokens = /**@type {!Array.<number>}*/(token);
+      while (tokens.length)
+        this.tokens.unshift(tokens.pop());
+    } else {
+      this.tokens.unshift(token);
+    }
+  },
+
+  /**
+   * When one or more tokens are pushed to a stream, those tokens
+   * must be inserted, in given order, after the last token in the
+   * stream.
+   *
+   * @param {(number|!Array.<number>)} token The token(s) to push to the stream.
+   */
+  push: function(token) {
+    if (Array.isArray(token)) {
+      var tokens = /**@type {!Array.<number>}*/(token);
+      while (tokens.length)
+        this.tokens.push(tokens.shift());
+    } else {
+      this.tokens.push(token);
+    }
+  }
+};
+
+//
+// 4. Encodings
+//
+
+// 4.1 Encoders and decoders
+
+/** @const */
+var finished = -1;
+
+/**
+ * @param {boolean} fatal If true, decoding errors raise an exception.
+ * @param {number=} opt_code_point Override the standard fallback code point.
+ * @return {number} The code point to insert on a decoding error.
+ */
+function decoderError(fatal, opt_code_point) {
+  if (fatal)
+    throw TypeError('Decoder error');
+  return opt_code_point || 0xFFFD;
+}
+
+//
+// 7. API
+//
+
+/** @const */ var DEFAULT_ENCODING = 'utf-8';
+
+// 7.1 Interface TextDecoder
+
+/**
+ * @constructor
+ * @param {string=} encoding The label of the encoding;
+ *     defaults to 'utf-8'.
+ * @param {Object=} options
+ */
+function TextDecoder(encoding, options) {
+  if (!(this instanceof TextDecoder)) {
+    return new TextDecoder(encoding, options);
+  }
+  encoding = encoding !== undefined ? String(encoding).toLowerCase() : DEFAULT_ENCODING;
+  if (encoding !== DEFAULT_ENCODING) {
+    throw new Error('Encoding not supported. Only utf-8 is supported');
+  }
+  options = ToDictionary(options);
+
+  /** @private @type {boolean} */
+  this._streaming = false;
+  /** @private @type {boolean} */
+  this._BOMseen = false;
+  /** @private @type {?Decoder} */
+  this._decoder = null;
+  /** @private @type {boolean} */
+  this._fatal = Boolean(options['fatal']);
+  /** @private @type {boolean} */
+  this._ignoreBOM = Boolean(options['ignoreBOM']);
+
+  Object.defineProperty(this, 'encoding', {value: 'utf-8'});
+  Object.defineProperty(this, 'fatal', {value: this._fatal});
+  Object.defineProperty(this, 'ignoreBOM', {value: this._ignoreBOM});
+}
+
+TextDecoder.prototype = {
+  /**
+   * @param {ArrayBufferView=} input The buffer of bytes to decode.
+   * @param {Object=} options
+   * @return {string} The decoded string.
+   */
+  decode: function decode(input, options) {
+    var bytes;
+    if (typeof input === 'object' && input instanceof ArrayBuffer) {
+      bytes = new Uint8Array(input);
+    } else if (typeof input === 'object' && 'buffer' in input &&
+               input.buffer instanceof ArrayBuffer) {
+      bytes = new Uint8Array(input.buffer,
+                             input.byteOffset,
+                             input.byteLength);
+    } else {
+      bytes = new Uint8Array(0);
+    }
+
+    options = ToDictionary(options);
+
+    if (!this._streaming) {
+      this._decoder = new UTF8Decoder({fatal: this._fatal});
+      this._BOMseen = false;
+    }
+    this._streaming = Boolean(options['stream']);
+
+    var input_stream = new Stream(bytes);
+
+    var code_points = [];
+
+    /** @type {?(number|!Array.<number>)} */
+    var result;
+
+    while (!input_stream.endOfStream()) {
+      result = this._decoder.handler(input_stream, input_stream.read());
+      if (result === finished)
+        break;
+      if (result === null)
+        continue;
+      if (Array.isArray(result))
+        code_points.push.apply(code_points, /**@type {!Array.<number>}*/(result));
+      else
+        code_points.push(result);
+    }
+    if (!this._streaming) {
+      do {
+        result = this._decoder.handler(input_stream, input_stream.read());
+        if (result === finished)
+          break;
+        if (result === null)
+          continue;
+        if (Array.isArray(result))
+          code_points.push.apply(code_points, /**@type {!Array.<number>}*/(result));
+        else
+          code_points.push(result);
+      } while (!input_stream.endOfStream());
+      this._decoder = null;
+    }
+
+    if (code_points.length) {
+      // If encoding is one of utf-8, utf-16be, and utf-16le, and
+      // ignore BOM flag and BOM seen flag are unset, run these
+      // subsubsteps:
+      if (['utf-8'].indexOf(this.encoding) !== -1 &&
+          !this._ignoreBOM && !this._BOMseen) {
+        // If token is U+FEFF, set BOM seen flag.
+        if (code_points[0] === 0xFEFF) {
+          this._BOMseen = true;
+          code_points.shift();
+        } else {
+          // Otherwise, if token is not end-of-stream, set BOM seen
+          // flag and append token to output.
+          this._BOMseen = true;
+        }
+      }
+    }
+
+    return codePointsToString(code_points);
+  }
+};
+
+// 7.2 Interface TextEncoder
+
+/**
+ * @constructor
+ * @param {string=} encoding The label of the encoding;
+ *     defaults to 'utf-8'.
+ * @param {Object=} options
+ */
+function TextEncoder(encoding, options) {
+  if (!(this instanceof TextEncoder))
+    return new TextEncoder(encoding, options);
+  encoding = encoding !== undefined ? String(encoding).toLowerCase() : DEFAULT_ENCODING;
+  if (encoding !== DEFAULT_ENCODING) {
+    throw new Error('Encoding not supported. Only utf-8 is supported');
+  }
+  options = ToDictionary(options);
+
+  /** @private @type {boolean} */
+  this._streaming = false;
+  /** @private @type {?Encoder} */
+  this._encoder = null;
+  /** @private @type {{fatal: boolean}} */
+  this._options = {fatal: Boolean(options['fatal'])};
+
+  Object.defineProperty(this, 'encoding', {value: 'utf-8'});
+}
+
+TextEncoder.prototype = {
+  /**
+   * @param {string=} opt_string The string to encode.
+   * @param {Object=} options
+   * @return {Uint8Array} Encoded bytes, as a Uint8Array.
+   */
+  encode: function encode(opt_string, options) {
+    opt_string = opt_string ? String(opt_string) : '';
+    options = ToDictionary(options);
+
+    // NOTE: This option is nonstandard. None of the encodings
+    // permitted for encoding (i.e. UTF-8, UTF-16) are stateful,
+    // so streaming is not necessary.
+    if (!this._streaming)
+      this._encoder = new UTF8Encoder(this._options);
+    this._streaming = Boolean(options['stream']);
+
+    var bytes = [];
+    var input_stream = new Stream(stringToCodePoints(opt_string));
+    /** @type {?(number|!Array.<number>)} */
+    var result;
+    while (!input_stream.endOfStream()) {
+      result = this._encoder.handler(input_stream, input_stream.read());
+      if (result === finished)
+        break;
+      if (Array.isArray(result))
+        bytes.push.apply(bytes, /**@type {!Array.<number>}*/(result));
+      else
+        bytes.push(result);
+    }
+    if (!this._streaming) {
+      while (true) {
+        result = this._encoder.handler(input_stream, input_stream.read());
+        if (result === finished)
+          break;
+        if (Array.isArray(result))
+          bytes.push.apply(bytes, /**@type {!Array.<number>}*/(result));
+        else
+          bytes.push(result);
+      }
+      this._encoder = null;
+    }
+    return new Uint8Array(bytes);
+  }
+};
+
+//
+// 8. The encoding
+//
+
+// 8.1 utf-8
+
+/**
+ * @constructor
+ * @implements {Decoder}
+ * @param {{fatal: boolean}} options
+ */
+function UTF8Decoder(options) {
+  var fatal = options.fatal;
+
+  // utf-8's decoder has an associated utf-8 code point, utf-8
+  // bytes seen, and utf-8 bytes needed (all initially 0), a utf-8
+  // lower boundary (initially 0x80), and a utf-8 upper boundary
+  // (initially 0xBF).
+  var /** @type {number} */ utf8_code_point = 0,
+      /** @type {number} */ utf8_bytes_seen = 0,
+      /** @type {number} */ utf8_bytes_needed = 0,
+      /** @type {number} */ utf8_lower_boundary = 0x80,
+      /** @type {number} */ utf8_upper_boundary = 0xBF;
+
+  /**
+   * @param {Stream} stream The stream of bytes being decoded.
+   * @param {number} bite The next byte read from the stream.
+   * @return {?(number|!Array.<number>)} The next code point(s)
+   *     decoded, or null if not enough data exists in the input
+   *     stream to decode a complete code point.
+   */
+  this.handler = function(stream, bite) {
+    // 1. If byte is end-of-stream and utf-8 bytes needed is not 0,
+    // set utf-8 bytes needed to 0 and return error.
+    if (bite === end_of_stream && utf8_bytes_needed !== 0) {
+      utf8_bytes_needed = 0;
+      return decoderError(fatal);
+    }
+
+    // 2. If byte is end-of-stream, return finished.
+    if (bite === end_of_stream)
+      return finished;
+
+    // 3. If utf-8 bytes needed is 0, based on byte:
+    if (utf8_bytes_needed === 0) {
+
+      // 0x00 to 0x7F
+      if (inRange(bite, 0x00, 0x7F)) {
+        // Return a code point whose value is byte.
+        return bite;
+      }
+
+      // 0xC2 to 0xDF
+      if (inRange(bite, 0xC2, 0xDF)) {
+        // Set utf-8 bytes needed to 1 and utf-8 code point to byte
+        // − 0xC0.
+        utf8_bytes_needed = 1;
+        utf8_code_point = bite - 0xC0;
+      }
+
+      // 0xE0 to 0xEF
+      else if (inRange(bite, 0xE0, 0xEF)) {
+        // 1. If byte is 0xE0, set utf-8 lower boundary to 0xA0.
+        if (bite === 0xE0)
+          utf8_lower_boundary = 0xA0;
+        // 2. If byte is 0xED, set utf-8 upper boundary to 0x9F.
+        if (bite === 0xED)
+          utf8_upper_boundary = 0x9F;
+        // 3. Set utf-8 bytes needed to 2 and utf-8 code point to
+        // byte − 0xE0.
+        utf8_bytes_needed = 2;
+        utf8_code_point = bite - 0xE0;
+      }
+
+      // 0xF0 to 0xF4
+      else if (inRange(bite, 0xF0, 0xF4)) {
+        // 1. If byte is 0xF0, set utf-8 lower boundary to 0x90.
+        if (bite === 0xF0)
+          utf8_lower_boundary = 0x90;
+        // 2. If byte is 0xF4, set utf-8 upper boundary to 0x8F.
+        if (bite === 0xF4)
+          utf8_upper_boundary = 0x8F;
+        // 3. Set utf-8 bytes needed to 3 and utf-8 code point to
+        // byte − 0xF0.
+        utf8_bytes_needed = 3;
+        utf8_code_point = bite - 0xF0;
+      }
+
+      // Otherwise
+      else {
+        // Return error.
+        return decoderError(fatal);
+      }
+
+      // Then (byte is in the range 0xC2 to 0xF4) set utf-8 code
+      // point to utf-8 code point << (6 × utf-8 bytes needed) and
+      // return continue.
+      utf8_code_point = utf8_code_point << (6 * utf8_bytes_needed);
+      return null;
+    }
+
+    // 4. If byte is not in the range utf-8 lower boundary to utf-8
+    // upper boundary, run these substeps:
+    if (!inRange(bite, utf8_lower_boundary, utf8_upper_boundary)) {
+
+      // 1. Set utf-8 code point, utf-8 bytes needed, and utf-8
+      // bytes seen to 0, set utf-8 lower boundary to 0x80, and set
+      // utf-8 upper boundary to 0xBF.
+      utf8_code_point = utf8_bytes_needed = utf8_bytes_seen = 0;
+      utf8_lower_boundary = 0x80;
+      utf8_upper_boundary = 0xBF;
+
+      // 2. Prepend byte to stream.
+      stream.prepend(bite);
+
+      // 3. Return error.
+      return decoderError(fatal);
+    }
+
+    // 5. Set utf-8 lower boundary to 0x80 and utf-8 upper boundary
+    // to 0xBF.
+    utf8_lower_boundary = 0x80;
+    utf8_upper_boundary = 0xBF;
+
+    // 6. Increase utf-8 bytes seen by one and set utf-8 code point
+    // to utf-8 code point + (byte − 0x80) << (6 × (utf-8 bytes
+    // needed − utf-8 bytes seen)).
+    utf8_bytes_seen += 1;
+    utf8_code_point += (bite - 0x80) << (6 * (utf8_bytes_needed - utf8_bytes_seen));
+
+    // 7. If utf-8 bytes seen is not equal to utf-8 bytes needed,
+    // continue.
+    if (utf8_bytes_seen !== utf8_bytes_needed)
+      return null;
+
+    // 8. Let code point be utf-8 code point.
+    var code_point = utf8_code_point;
+
+    // 9. Set utf-8 code point, utf-8 bytes needed, and utf-8 bytes
+    // seen to 0.
+    utf8_code_point = utf8_bytes_needed = utf8_bytes_seen = 0;
+
+    // 10. Return a code point whose value is code point.
+    return code_point;
+  };
+}
+
+/**
+ * @constructor
+ * @implements {Encoder}
+ * @param {{fatal: boolean}} options
+ */
+function UTF8Encoder(options) {
+  var fatal = options.fatal;
+  /**
+   * @param {Stream} stream Input stream.
+   * @param {number} code_point Next code point read from the stream.
+   * @return {(number|!Array.<number>)} Byte(s) to emit.
+   */
+  this.handler = function(stream, code_point) {
+    // 1. If code point is end-of-stream, return finished.
+    if (code_point === end_of_stream)
+      return finished;
+
+    // 2. If code point is in the range U+0000 to U+007F, return a
+    // byte whose value is code point.
+    if (inRange(code_point, 0x0000, 0x007f))
+      return code_point;
+
+    // 3. Set count and offset based on the range code point is in:
+    var count, offset;
+    // U+0080 to U+07FF:    1 and 0xC0
+    if (inRange(code_point, 0x0080, 0x07FF)) {
+      count = 1;
+      offset = 0xC0;
+    }
+    // U+0800 to U+FFFF:    2 and 0xE0
+    else if (inRange(code_point, 0x0800, 0xFFFF)) {
+      count = 2;
+      offset = 0xE0;
+    }
+    // U+10000 to U+10FFFF: 3 and 0xF0
+    else if (inRange(code_point, 0x10000, 0x10FFFF)) {
+      count = 3;
+      offset = 0xF0;
+    }
+
+    // 4. Let bytes be a byte sequence whose first byte is (code
+    // point >> (6 × count)) + offset.
+    var bytes = [(code_point >> (6 * count)) + offset];
+
+    // 5. Run these substeps while count is greater than 0:
+    while (count > 0) {
+
+      // 1. Set temp to code point >> (6 × (count − 1)).
+      var temp = code_point >> (6 * (count - 1));
+
+      // 2. Append to bytes 0x80 | (temp & 0x3F).
+      bytes.push(0x80 | (temp & 0x3F));
+
+      // 3. Decrease count by one.
+      count -= 1;
+    }
+
+    // 6. Return bytes bytes, in order.
+    return bytes;
+  };
+}
+
+exports.TextEncoder = TextEncoder;
+exports.TextDecoder = TextDecoder;
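
A quick usage sketch of the TextEncoder/TextDecoder polyfill above (only the 'utf-8'
label is accepted; any other encoding throws 'Encoding not supported'):

  var encoder = new TextEncoder();        // label defaults to 'utf-8'
  var decoder = new TextDecoder('utf-8');
  var bytes = encoder.encode('héllo');    // Uint8Array of UTF-8 bytes, e.g. 0xC3 0xA9 for 'é'
  decoder.decode(bytes) === 'héllo';      // true: the string round-trips through UTF-8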

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/closure-compiler-scripts/tslib.js
----------------------------------------------------------------------
diff --git a/js/closure-compiler-scripts/tslib.js b/js/closure-compiler-scripts/tslib.js
new file mode 100644
index 0000000..b5a722a
--- /dev/null
+++ b/js/closure-compiler-scripts/tslib.js
@@ -0,0 +1,151 @@
+/**
+ * closure-compiler-friendly tslib
+ * copied from node_modules/tslib/tslib.js
+ * update as needed
+ */
+
+var extendStatics = Object.setPrototypeOf ||
+    ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+    function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
+
+function __extends(d, b) {
+    extendStatics(d, b);
+    function __() { this.constructor = d; }
+    d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+};
+
+var __assign = Object.assign || function (t) {
+    for (var s, i = 1, n = arguments.length; i < n; i++) {
+        s = arguments[i];
+        for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
+    }
+    return t;
+};
+
+function __rest(s, e) {
+    var t = {};
+    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
+        t[p] = s[p];
+    if (s != null && typeof Object.getOwnPropertySymbols === "function")
+        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) if (e.indexOf(p[i]) < 0)
+            t[p[i]] = s[p[i]];
+    return t;
+};
+
+function __decorate(decorators, target, key, desc) {
+    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+    return c > 3 && r && Object.defineProperty(target, key, r), r;
+};
+
+function __param(paramIndex, decorator) {
+    return function (target, key) { decorator(target, key, paramIndex); }
+};
+
+function __metadata(metadataKey, metadataValue) {
+    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
+};
+
+function __awaiter(thisArg, _arguments, P, generator) {
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+
+function __generator(thisArg, body) {
+    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+    function verb(n) { return function (v) { return step([n, v]); }; }
+    function step(op) {
+        if (f) throw new TypeError("Generator is already executing.");
+        while (_) try {
+            if (f = 1, y && (t = y[op[0] & 2 ? "return" : op[0] ? "throw" : "next"]) && !(t = t.call(y, op[1])).done) return t;
+            if (y = 0, t) op = [0, t.value];
+            switch (op[0]) {
+                case 0: case 1: t = op; break;
+                case 4: _.label++; return { value: op[1], done: false };
+                case 5: _.label++; y = op[1]; op = [0]; continue;
+                case 7: op = _.ops.pop(); _.trys.pop(); continue;
+                default:
+                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+                    if (t[2]) _.ops.pop();
+                    _.trys.pop(); continue;
+            }
+            op = body.call(thisArg, _);
+        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+    }
+};
+
+function __exportStar(m, exports) {
+    for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
+};
+
+function __values(o) {
+    var m = typeof Symbol === "function" && o[Symbol.iterator], i = 0;
+    if (m) return m.call(o);
+    return {
+        next: function () {
+            if (o && i >= o.length) o = void 0;
+            return { value: o && o[i++], done: !o };
+        }
+    };
+};
+
+function __read(o, n) {
+    var m = typeof Symbol === "function" && o[Symbol.iterator];
+    if (!m) return o;
+    var i = m.call(o), r, ar = [], e;
+    try {
+        while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
+    }
+    catch (error) { e = { error: error }; }
+    finally {
+        try {
+            if (r && !r.done && (m = i["return"])) m.call(i);
+        }
+        finally { if (e) throw e.error; }
+    }
+    return ar;
+};
+
+function __spread() {
+    for (var ar = [], i = 0; i < arguments.length; i++)
+        ar = ar.concat(__read(arguments[i]));
+    return ar;
+};
+
+function __await(v) {
+    return this instanceof __await ? (this.v = v, this) : new __await(v);
+};
+
+function __asyncGenerator(thisArg, _arguments, generator) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var g = generator.apply(thisArg, _arguments || []), i, q = [];
+    return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
+    function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
+    function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+    function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r);  }
+    function fulfill(value) { resume("next", value); }
+    function reject(value) { resume("throw", value); }
+    function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+};
+
+function __asyncDelegator(o) {
+    var i, p;
+    return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
+    function verb(n, f) { if (o[n]) i[n] = function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; }; }
+};
+
+function __asyncValues(o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator];
+    return m ? m.call(o) : typeof __values === "function" ? __values(o) : o[Symbol.iterator]();
+};
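
These are the runtime helpers that downleveled TypeScript output calls into; for
example, a subclass declaration lowers to roughly the shape below (an illustrative
sketch, not literal compiler output):

  function Parent() {}
  Parent.prototype.greet = function () { return 'hi'; };

  // Roughly what `class Child extends Parent {}` becomes in ES5 output
  // that uses the __extends helper above.
  var Child = (function (_super) {
    __extends(Child, _super);
    function Child() {
      return _super !== null && _super.apply(this, arguments) || this;
    }
    return Child;
  }(Parent));

  new Child().greet(); // 'hi' -- the prototype chain is wired up by __extends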

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/examples/read_file.html
----------------------------------------------------------------------
diff --git a/js/examples/read_file.html b/js/examples/read_file.html
index 5a650a0..2a1ebab 100644
--- a/js/examples/read_file.html
+++ b/js/examples/read_file.html
@@ -21,7 +21,7 @@ under the License.
 
 <html>
   <head>
-    <title>arrow.js browser test</title>
+    <title>Arrow.js browser test</title>
     <meta charset="utf-8">
     <style>
 table {
@@ -33,30 +33,41 @@ table, th, td {
     </style>
     <script type="text/javascript">
 var reader = new FileReader();
-function addCell (tr, type, name) {
+function addCell (tr, type, value) {
   var td = document.createElement(type)
-  td.textContent = name;
+  td.textContent = value;
   tr.appendChild(td);
 }
 reader.onload = function (evt) {
-  var reader = new arrow.getReader(new Uint8Array(evt.target.result));
-  var schema = reader.getSchema();
-  var length = reader.loadNextBatch();
-console.log(JSON.stringify(schema, null, '\t'));
 
+  var arrowTable = Arrow.Table.from(new Uint8Array(evt.target.result));
   var thead = document.getElementById("thead");
   var tbody = document.getElementById("tbody");
-  var header_row = document.createElement("tr");
 
-  schema.forEach(function (d) {
-    addCell(header_row, "th", d.name);
-  });
+  while (thead.hasChildNodes()) {
+      thead.removeChild(thead.lastChild);
+  }
+
+  while (tbody.hasChildNodes()) {
+      tbody.removeChild(tbody.lastChild);
+  }
+
+  var header_row = document.createElement("tr");
+  for (let column of arrowTable.cols()) {
+    addCell(header_row, "th", column.name);
+  }
 
   thead.appendChild(header_row);
 
-  for (var i = 0; i < length; i += 1|0) {
+  for (let row of arrowTable.rows(true)) {
     var tr = document.createElement("tr");
-    schema.forEach(function (d) { addCell(tr, "td", reader.getVector(d.name).get(i)); });
+    for (let cell of row) {
+      addCell(tr, "td",
+        cell == null ? 'null'
+        : !Array.isArray(cell) ? cell
+        : '[' + cell.map((value) => value == null ? 'null' : value).join(', ') + ']'
+      );
+    }
     tbody.appendChild(tr);
   }
 }
@@ -74,6 +85,6 @@ function handleFiles(files) {
       <tbody id="tbody">
       </tbody>
     </table>
-    <script type="text/javascript" src="../_bundles/arrow.js"></script>
+    <script type="text/javascript" src="../dist/Arrow.js"></script>
   </body>
 </html>
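
The same Table API works outside the browser; a minimal Node sketch, assuming the
package's CommonJS entry point exposes the Arrow namespace the same way the UMD bundle
does, and that an Arrow file such as simple.arrow is on disk:

  const fs = require('fs');
  const Arrow = require('apache-arrow');  // assumption: resolves to targets/es5/cjs/Arrow.js

  const table = Arrow.Table.from(new Uint8Array(fs.readFileSync('simple.arrow')));
  for (let column of table.cols()) {
    console.log(column.name);
  }
  for (let row of table.rows(true)) {     // compact rows, as in the HTML example above
    console.log(row);
  }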

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/flatbuffers.sh
----------------------------------------------------------------------
diff --git a/js/flatbuffers.sh b/js/flatbuffers.sh
deleted file mode 100755
index 0f8e3f9..0000000
--- a/js/flatbuffers.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-echo "Compiling flatbuffer schemas..."
-mkdir -p lib lib-esm
-DIR=`mktemp -d`
-flatc -o $DIR --js ../format/*.fbs
-cat $DIR/*_generated.js > src/Arrow_generated.js
-
-# Duplicate in the tsc-generated outputs - we can't make tsc pull in .js files
-# and still prooduce declaration files
-cat $DIR/*_generated.js > lib/Arrow_generated.js
-cat $DIR/*_generated.js > lib-esm/Arrow_generated.js
-rm -rf $DIR

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/gulpfile.js
----------------------------------------------------------------------
diff --git a/js/gulpfile.js b/js/gulpfile.js
new file mode 100644
index 0000000..90c45b7
--- /dev/null
+++ b/js/gulpfile.js
@@ -0,0 +1,285 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+const del = require(`del`);
+const gulp = require(`gulp`);
+const path = require(`path`);
+const pump = require(`pump`);
+const ts = require(`gulp-typescript`);
+const streamMerge = require(`merge2`);
+const sourcemaps = require(`gulp-sourcemaps`);
+const child_process = require(`child_process`);
+const gulpJsonTransform = require(`gulp-json-transform`);
+const closureCompiler = require(`google-closure-compiler`).gulp();
+
+const knownTargets = [`es5`, `es2015`, `esnext`];
+const knownModules = [`cjs`, `esm`, `cls`, `umd`];
+
+// see: https://github.com/google/closure-compiler/blob/c1372b799d94582eaf4b507a4a22558ff26c403c/src/com/google/javascript/jscomp/CompilerOptions.java#L2988
+const gCCTargets = {
+    es5: `ECMASCRIPT5`,
+    es2015: `ECMASCRIPT_2015`,
+    es2016: `ECMASCRIPT_2016`,
+    es2017: `ECMASCRIPT_2017`,
+    esnext: `ECMASCRIPT_NEXT`
+};
+
+const tsProjects = [];
+const argv = require(`command-line-args`)([
+    { name: `all`, alias: `a`, type: Boolean },
+    { name: 'update', alias: 'u', type: Boolean },
+    { name: 'verbose', alias: 'v', type: Boolean },
+    { name: `target`, type: String, defaultValue: `` },
+    { name: `module`, type: String, defaultValue: `` },
+    { name: `coverage`, type: Boolean, defaultValue: false },
+    { name: `targets`, alias: `t`, type: String, multiple: true, defaultValue: [] },
+    { name: `modules`, alias: `m`, type: String, multiple: true, defaultValue: [] }
+]);
+
+const { targets, modules } = argv;
+
+argv.target && !targets.length && targets.push(argv.target);
+argv.module && !modules.length && modules.push(argv.module);
+(argv.all || !targets.length) && targets.push(`all`);
+(argv.all || !modules.length) && modules.push(`all`);
+
+for (const [target, format] of combinations([`all`, `all`])) {
+    const combo = `${target}:${format}`;
+    gulp.task(`test:${combo}`, gulp.series(testTask(target, format, combo, `targets/${target}/${format}`)));
+    gulp.task(`clean:${combo}`, gulp.series(cleanTask(target, format, combo, `targets/${target}/${format}`)));
+    gulp.task(`build:${combo}`, gulp.series(buildTask(target, format, combo, `targets/${target}/${format}`)));
+    gulp.task(`bundle:${combo}`, gulp.series(bundleTask(target, format, combo, `targets/${target}/${format}`)));
+    gulp.task(`test:debug:${combo}`, gulp.series(testTask(target, format, combo, `targets/${target}/${format}`, true)));
+}
+
+gulp.task(`test`, gulp.series(runTaskCombos(`test`)));
+gulp.task(`clean`, gulp.parallel(runTaskCombos(`clean`)));
+gulp.task(`build`, gulp.parallel(runTaskCombos(`bundle`)));
+gulp.task(`test:debug`, gulp.series(runTaskCombos(`test:debug`)));
+gulp.task(`default`, gulp.task(`build`));
+
+function runTaskCombos(name) {
+    const combos = [];
+    for (const [target, format] of combinations(targets, modules)) {
+        if (format === `cls`) {
+            continue;
+        }
+        combos.push(`${name}:${target}:${format}`);
+    }
+    return combos;
+}
+
+function cleanTask(target, format, taskName, outDir) {
+    return () => {
+        const globs = [`${outDir}/**`];
+        if (target === `es5` && format === `cjs`) {
+            globs.push(`typings`);
+        }
+        return del(globs);
+    };
+}
+
+function buildTask(target, format, taskName, outDir) {
+    return format === `umd`
+        ? closureTask(target, format, taskName, outDir)
+        : typescriptTask(target, format, taskName, outDir);
+}
+
+function bundleTask(target, format, taskName, outDir) {
+    return [
+        [`build:${taskName}`],
+        (cb) => streamMerge([
+            pump(gulp.src([`LICENSE`, `README.md`, `CHANGELOG.md`]), gulp.dest(outDir)),
+            pump(
+                gulp.src(`package.json`),
+                gulpJsonTransform((orig) => [
+                    `version`, `description`,
+                    `author`, `homepage`, `bugs`, `license`,
+                    `keywords`, `repository`, `peerDependencies`
+                ].reduce((copy, key) => (
+                    (copy[key] = orig[key]) && copy || copy
+                ), {
+                    main: `Arrow.js`,
+                    typings: `Arrow.d.ts`,
+                    name: `@apache-arrow/${target}-${format}`
+                }), 2),
+                gulp.dest(outDir),
+                onError
+            )
+        ])
+    ];
+}
+
+function testTask(target, format, taskName, outDir, debug) {
+    const jestOptions = !debug ? [] : [
+        `--runInBand`, `--env`, `jest-environment-node-debug`];
+    argv.update && jestOptions.unshift(`-u`);
+    argv.verbose && jestOptions.unshift(`--verbose`);
+    argv.coverage && jestOptions.unshift(`--coverage`);
+    const jestPath = `./node_modules/.bin/jest`;
+    const debugOpts = jestOptions.join(' ');
+    const spawnOptions = {
+        stdio: [`ignore`, `inherit`, `inherit`],
+        env: Object.assign({}, process.env, {
+            TEST_TARGET: target, TEST_MODULE: format
+        })
+    };
+    return () => !debug ?
+        child_process.spawn(jestPath, jestOptions, spawnOptions) :
+        child_process.exec(`node --inspect-brk ${jestPath} ${debugOpts}`, spawnOptions);
+}
+
+function closureTask(target, format, taskName, outDir) {
+    const clsTarget = `es5`;
+    const googleRoot = `targets/${clsTarget}/cls`;
+    const languageIn = clsTarget === `es5` ? `es2015` : clsTarget;
+    return [
+        [`clean:${taskName}`, `build:${clsTarget}:cls`],
+        () => {
+            return streamMerge([
+                closureStream(closureSrcs(false), `Arrow`, onError, true),
+                closureStream(closureSrcs(true), `Arrow.internal`, onError)
+            ])
+            .on('end', () => del([`targets/${target}/cls/**`]));
+        }
+    ];
+    function closureSrcs(isInternal) {
+        return gulp.src([
+            `closure-compiler-scripts/*.js`,
+            `${googleRoot}/**/*.js`,
+            `!${googleRoot}/format/*.js`,
+            `!${googleRoot}/Arrow.externs.js`,
+            `!${googleRoot}/Arrow${isInternal ? `` : `.internal`}.js`
+        ], { base: `./` });
+    }
+    function closureStream(sources, entry, onError, copyToDist) {
+        const streams = [
+            sources,
+            sourcemaps.init(),
+            closureCompiler(closureArgs(entry)),
+            sourcemaps.write('.'),
+            gulp.dest(outDir)
+        ];
+        // copy the UMD bundle to dist
+        if (target === `es5` && copyToDist) {
+            streams.push(gulp.dest(`dist`));
+        }
+        return pump(...streams, onError);
+    }
+    function closureArgs(entry) {
+        return {
+            third_party: true,
+            externs: `${googleRoot}/Arrow.externs.js`,
+            warning_level: `QUIET`,
+            dependency_mode: `LOOSE`,
+            rewrite_polyfills: false,
+            // formatting: `PRETTY_PRINT`,
+            compilation_level: `ADVANCED`,
+            assume_function_wrapper: true,
+            js_output_file: `${entry}.js`,
+            language_in: gCCTargets[languageIn],
+            language_out: gCCTargets[clsTarget],
+            entry_point: `${googleRoot}/${entry}.js`,
+            output_wrapper:
+`// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+(function (global, factory) {
+    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
+    typeof define === 'function' && define.amd ? define(['exports'], factory) :
+    (factory(global.Arrow = global.Arrow || {}));
+}(this, (function (exports) {%output%}.bind(this))));`
+        };
+    }
+}
+
+function typescriptTask(target, format, taskName, outDir) {
+    return [
+        [`clean:${taskName}`],
+        () => {
+            const tsconfigPath = `tsconfig/tsconfig.${target}.${format}.json`;
+            let { js, dts } = tsProjects.find((p) => p.target === target && p.format === format) || {};
+            if (!js || !dts) {
+                let tsProject = ts.createProject(tsconfigPath);
+                ({ js, dts } = pump(
+                    tsProject.src(),
+                    sourcemaps.init(),
+                    tsProject(ts.reporter.fullReporter(true)),
+                    onError
+                ));
+                dts = [dts, gulp.dest(outDir)];
+                js = [js, sourcemaps.write(), gulp.dest(outDir)];
+                // copy types to the root
+                if (target === `es5` && format === `cjs`) {
+                    dts.push(gulp.dest(`typings`));
+                }
+                tsProjects.push({
+                    target, format, 
+                    js: js = pump(...js, onError),
+                    dts: dts = pump(...dts, onError)
+                });
+            }
+            return streamMerge([ dts, js ]);
+        }
+    ];
+}
+
+function* combinations(_targets, _modules) {
+
+    const targets = known(knownTargets, _targets || [`all`]);
+    const modules = known(knownModules, _modules || [`all`]);
+
+    for (const format of modules) {
+        for (const target of targets) {
+            yield [target, format];
+        }
+    }
+
+    function known(known, values) {
+        return ~values.indexOf(`all`)
+            ? known
+            : Object.keys(
+                values.reduce((map, arg) => ((
+                    (known.indexOf(arg) !== -1) &&
+                    (map[arg.toLowerCase()] = true)
+                    || true) && map
+                ), {})
+            ).sort((a, b) => known.indexOf(a) - known.indexOf(b));
+    }
+}
+
+function onError(err) {
+    if (typeof err === 'number') {
+        process.exit(err);
+    } else if (err) {
+        console.error(err.stack || err.toString());
+        process.exit(1);
+    }
+}
\ No newline at end of file
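
The per-combination gulp tasks above come from the cartesian product of compilation
targets and module formats. A quick sketch of how the combinations generator expands
(the -t/--targets and -m/--modules flags parsed by command-line-args narrow which of
these the top-level build/test/clean tasks actually run):

  const combos = [...combinations(['es5', 'esnext'], ['cjs', 'umd'])];
  // => [ ['es5', 'cjs'], ['esnext', 'cjs'], ['es5', 'umd'], ['esnext', 'umd'] ]
  // corresponding to task names build:es5:cjs, build:esnext:cjs, build:es5:umd, build:esnext:umd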

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/lerna.json
----------------------------------------------------------------------
diff --git a/js/lerna.json b/js/lerna.json
new file mode 100644
index 0000000..c8fb8c0
--- /dev/null
+++ b/js/lerna.json
@@ -0,0 +1,9 @@
+{
+  "lerna": "2.0.0",
+  "version": "0.1.1",
+  "packages": [
+    "targets/es5/*",
+    "targets/es2015/*",
+    "targets/esnext/*"
+  ]
+}

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/package.json
----------------------------------------------------------------------
diff --git a/js/package.json b/js/package.json
index e46b5bd..8998221 100644
--- a/js/package.json
+++ b/js/package.json
@@ -1,29 +1,140 @@
 {
-  "name": "arrow",
-  "version": "0.0.0",
-  "description": "",
-  "main": "lib/arrow.js",
+  "name": "apache-arrow",
+  "version": "0.1.2",
+  "main": "./targets/es5/cjs/Arrow.js",
+  "module": "./targets/es5/esm/Arrow.js",
+  "browser": "./targets/es5/umd/Arrow.js",
+  "jsnext:main": "./targets/es2015/esm/Arrow.js",
+  "esnext:main": "./targets/esnext/esm/Arrow.js",
+  "typings": "./typings/Arrow.d.ts",
+  "description": "Apache Arrow columnar in-memory format",
   "scripts": {
-    "build": "./flatbuffers.sh && tsc && tsc -m es6 --outDir lib-esm && webpack",
-    "clean": "rm -rf lib lib-esm _bundles",
-    "test": "./node_modules/mocha/bin/mocha ./spec/arrow.js",
-    "lint": "./node_modules/tslint/bin/tslint"
+    "lerna": "lerna",
+    "commit": "git-cz",
+    "test": "gulp test",
+    "build": "gulp build",
+    "clean": "gulp clean",
+    "perf": "node ./perf/index.js",
+    "test:debug": "gulp test:debug",
+    "test:coverage": "gulp test -t esnext -m esm --coverage",
+    "validate": "npm-run-all lint build test",
+    "lerna:publish": "lerna exec --bail=false npm publish",
+    "prepublishOnly": "sh ./prepublish.sh",
+    "commitmsg": "validate-commit-msg",
+    "doc": "shx rm -rf ./doc && esdoc",
+    "lint": "npm-run-all -p lint:*",
+    "lint:src": "tslint --fix --type-check -p tsconfig.json -c tslint.json \"src/**/*.ts\"",
+    "lint:test": "tslint --fix --type-check -p test/tsconfig.json -c tslint.json \"test/**/*.ts\""
   },
-  "author": "",
-  "repository": "https://github.com/apache/arrow/",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/apache/arrow.git"
+  },
+  "keywords": [
+    "apache",
+    "arrow"
+  ],
+  "author": "Apache Software Foundation",
   "license": "Apache-2.0",
-  "devDependencies": {
-    "awesome-typescript-loader": "^3.1.3",
-    "chai": "^3.5.0",
-    "mocha": "^3.3.0",
-    "tslint": "^5.3.2",
-    "typescript": "^2.3.2",
-    "uglifyjs-webpack-plugin": "^0.4.3",
-    "webpack": "^2.3.3"
+  "bugs": {
+    "url": "https://issues.apache.org/jira/projects/ARROW"
+  },
+  "homepage": "https://github.com/apache/arrow/blob/master/js/README.md",
+  "files": [
+    "src",
+    "dist",
+    "typings",
+    "targets",
+    "LICENSE",
+    "README.md"
+  ],
+  "peerDependencies": {
+    "tslib": "^1.7.1"
   },
   "dependencies": {
-    "flatbuffers": "^1.5.0",
-    "text-encoding": "^0.6.4",
-    "commander": "^2.9.0"
+    "command-line-args": "~4.0.7",
+    "command-line-usage": "~4.0.1",
+    "flatbuffers": "~1.7.0",
+    "text-encoding": "~0.6.4"
+  },
+  "devDependencies": {
+    "@types/flatbuffers": "~1.6.4",
+    "@types/jest": "~20.0.8",
+    "@types/node": "~8.0.24",
+    "@types/text-encoding": "0.0.32",
+    "benchmark": "~2.1.4",
+    "commitizen": "~2.9.6",
+    "conventional-changelog-cli": "~1.3.2",
+    "conventional-commits-detector": "~0.1.1",
+    "conventional-github-releaser": "~1.1.12",
+    "conventional-recommended-bump": "~1.0.1",
+    "coveralls": "~2.13.1",
+    "cz-conventional-changelog": "~2.0.0",
+    "del": "~3.0.0",
+    "esdoc": "~1.0.1",
+    "esdoc-standard-plugin": "~1.0.0",
+    "google-closure-compiler": "~20170806.0.0",
+    "gulp": "github:gulpjs/gulp#4.0",
+    "gulp-json-transform": "~0.4.2",
+    "gulp-sourcemaps": "~2.6.1",
+    "gulp-typescript": "~3.2.2",
+    "jest": "~20.0.4",
+    "jest-environment-node-debug": "~2.0.0",
+    "json": "~9.0.6",
+    "lerna": "2.0.0",
+    "lint-staged": "~4.0.1",
+    "merge2": "~1.1.0",
+    "mkdirp": "~0.5.1",
+    "npm-run-all": "~4.0.2",
+    "pump": "~1.0.2",
+    "rimraf": "~2.6.1",
+    "shx": "~0.2.2",
+    "text-encoding-utf-8": "~1.0.1",
+    "trash": "~4.0.1",
+    "ts-jest": "~20.0.10",
+    "tslib": "~1.7.1",
+    "tslint": "~5.6.0",
+    "typescript": "~2.4.2",
+    "validate-commit-msg": "~2.14.0"
+  },
+  "config": {
+    "commitizen": {
+      "path": "cz-conventional-changelog"
+    }
+  },
+  "lint-staged": {
+    "*.@(ts)": [
+      "tslint --fix",
+      "git add"
+    ]
+  },
+  "jest": {
+    "verbose": false,
+    "globals": {
+      "ts-jest": {
+        "tsConfigFile": "test/tsconfig.json"
+      }
+    },
+    "roots": [
+      "<rootDir>/test/"
+    ],
+    "moduleFileExtensions": [
+      "js",
+      "ts",
+      "tsx"
+    ],
+    "mapCoverage": true,
+    "coverageReporters": [
+      "lcov"
+    ],
+    "coveragePathIgnorePatterns": [
+      "format\\/(File|Message|Schema|Tensor)_generated\\.(js|ts)$",
+      "test\\/.*\\.(ts|tsx|js)$"
+    ],
+    "transform": {
+      ".(ts|tsx)": "<rootDir>/node_modules/ts-jest/preprocessor.js",
+      ".(js|jsx)": "<rootDir>/node_modules/babel-jest/build/index.js"
+    },
+    "testRegex": "(.*(-|\\.)(test|spec)s?)\\.(ts|tsx|js)$"
   }
 }
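
The jest configuration above collects tests by file name; a quick check of what the
testRegex matches (illustrative file names):

  const testRegex = /(.*(-|\.)(test|spec)s?)\.(ts|tsx|js)$/;
  testRegex.test('table-tests.ts'); // true  -- *-tests.ts files under test/ are picked up
  testRegex.test('Arrow.ts');       // false -- regular sources are not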

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/file/dictionary.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/file/dictionary.arrow b/js/perf/arrows/file/dictionary.arrow
new file mode 100644
index 0000000..34d41db
Binary files /dev/null and b/js/perf/arrows/file/dictionary.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/file/simple.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/file/simple.arrow b/js/perf/arrows/file/simple.arrow
new file mode 100644
index 0000000..838db6d
Binary files /dev/null and b/js/perf/arrows/file/simple.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/file/struct.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/file/struct.arrow b/js/perf/arrows/file/struct.arrow
new file mode 100644
index 0000000..3d2c018
Binary files /dev/null and b/js/perf/arrows/file/struct.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/multi/count/records.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/multi/count/records.arrow b/js/perf/arrows/multi/count/records.arrow
new file mode 100644
index 0000000..00d8837
Binary files /dev/null and b/js/perf/arrows/multi/count/records.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/multi/count/schema.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/multi/count/schema.arrow b/js/perf/arrows/multi/count/schema.arrow
new file mode 100644
index 0000000..dfd24e9
Binary files /dev/null and b/js/perf/arrows/multi/count/schema.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/multi/latlong/records.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/multi/latlong/records.arrow b/js/perf/arrows/multi/latlong/records.arrow
new file mode 100644
index 0000000..563d12d
Binary files /dev/null and b/js/perf/arrows/multi/latlong/records.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/multi/latlong/schema.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/multi/latlong/schema.arrow b/js/perf/arrows/multi/latlong/schema.arrow
new file mode 100644
index 0000000..638b2ab
Binary files /dev/null and b/js/perf/arrows/multi/latlong/schema.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/multi/origins/records.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/multi/origins/records.arrow b/js/perf/arrows/multi/origins/records.arrow
new file mode 100644
index 0000000..49a8c40
Binary files /dev/null and b/js/perf/arrows/multi/origins/records.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/multi/origins/schema.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/multi/origins/schema.arrow b/js/perf/arrows/multi/origins/schema.arrow
new file mode 100644
index 0000000..0d10fb0
Binary files /dev/null and b/js/perf/arrows/multi/origins/schema.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/stream/dictionary.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/stream/dictionary.arrow b/js/perf/arrows/stream/dictionary.arrow
new file mode 100644
index 0000000..17ca48b
Binary files /dev/null and b/js/perf/arrows/stream/dictionary.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/stream/simple.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/stream/simple.arrow b/js/perf/arrows/stream/simple.arrow
new file mode 100644
index 0000000..2c68c0e
Binary files /dev/null and b/js/perf/arrows/stream/simple.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/arrows/stream/struct.arrow
----------------------------------------------------------------------
diff --git a/js/perf/arrows/stream/struct.arrow b/js/perf/arrows/stream/struct.arrow
new file mode 100644
index 0000000..4e97b70
Binary files /dev/null and b/js/perf/arrows/stream/struct.arrow differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/0c8853f9/js/perf/config.js
----------------------------------------------------------------------
diff --git a/js/perf/config.js b/js/perf/config.js
new file mode 100644
index 0000000..4fbcda3
--- /dev/null
+++ b/js/perf/config.js
@@ -0,0 +1,38 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+const fs = require('fs');
+const path = require('path');
+const arrowFormats = ['file', 'stream'];
+const arrowFileNames = ['simple', 'struct', 'dictionary'];
+const multipartArrows = ['count', 'latlong', 'origins'];
+let arrowTestConfigurations = [];
+
+arrowTestConfigurations = multipartArrows.reduce((configs, folder) => {
+    const schemaPath = path.resolve(__dirname, `./arrows/multi/${folder}/schema.arrow`);
+    const recordsPath = path.resolve(__dirname, `./arrows/multi/${folder}/records.arrow`);
+    return [...configs, [`multipart ${folder}`, fs.readFileSync(schemaPath), fs.readFileSync(recordsPath)]];
+}, arrowTestConfigurations);
+
+arrowTestConfigurations = arrowFormats.reduce((configs, format) => {
+    return arrowFileNames.reduce((configs, name) => {
+        const arrowPath = path.resolve(__dirname, `./arrows/${format}/${name}.arrow`);
+        return [...configs, [`${name} ${format}`, fs.readFileSync(arrowPath)]];
+    }, configs);
+}, arrowTestConfigurations);
+
+module.exports = arrowTestConfigurations;
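
For reference, the exported perf configurations are tuples of a label plus one or two
file Buffers, so the benchmark code can iterate them uniformly; the resulting array
looks roughly like:

  // [ ['multipart count',   <schema Buffer>, <records Buffer>],
  //   ['multipart latlong', <schema Buffer>, <records Buffer>],
  //   ['multipart origins', <schema Buffer>, <records Buffer>],
  //   ['simple file', <Buffer>], ['struct file', <Buffer>], ['dictionary file', <Buffer>],
  //   ['simple stream', <Buffer>], ['struct stream', <Buffer>], ['dictionary stream', <Buffer>] ]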