Posted to commits@arrow.apache.org by we...@apache.org on 2017/09/08 22:42:19 UTC

[4/7] arrow git commit: ARROW-1479: [JS] Expand JavaScript implementation

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/perf/index.js
----------------------------------------------------------------------
diff --git a/js/perf/index.js b/js/perf/index.js
new file mode 100644
index 0000000..669f690
--- /dev/null
+++ b/js/perf/index.js
@@ -0,0 +1,113 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// Use the ES5 UMD target as perf baseline
+// ES6/7 iterators are faster in turbofan, but something about the
+// ES5 transpilation (rewriting let and const to var?) JITs better
+const { Table, readBuffers } = require('../dist/Arrow');
+// const { Table, readBuffers } = require('../targets/es5/cjs');
+// const { Table, readBuffers } = require('../targets/es2015/cjs');
+// const { Table, readBuffers } = require('../targets/esnext/cjs');
+
+const Benchmark = require('benchmark');
+const arrowTestConfigurations = require('./config');
+
+const suites = [];
+
+for (let [name, ...buffers] of arrowTestConfigurations) {
+    const parseSuite = new Benchmark.Suite(`Parse ${name}`, { async: true });
+    const sliceSuite = new Benchmark.Suite(`Slice ${name} vectors`, { async: true });
+    const iterateSuite = new Benchmark.Suite(`Iterate ${name} vectors`, { async: true });
+    const getByIndexSuite = new Benchmark.Suite(`Get ${name} values by index`, { async: true });
+    parseSuite.add(createFromTableTest(name, buffers));
+    parseSuite.add(createReadBuffersTest(name, buffers));
+    for (const vector of Table.from(...buffers).cols()) {
+        sliceSuite.add(createSliceTest(vector));
+        iterateSuite.add(createIterateTest(vector));
+        getByIndexSuite.add(createGetByIndexTest(vector));
+    }
+    suites.push(parseSuite, sliceSuite, getByIndexSuite, iterateSuite);
+}
+
+console.log('Running apache-arrow performance tests...\n');
+
+run();
+
+function run() {
+    var suite = suites.shift();
+    suite && suite.on('complete', function() {
+        console.log(suite.name + ':\n' + this.map(function(x) {
+            var str = x.toString();
+            var meanMsPerOp = Math.round(x.stats.mean * 100000)/100;
+            var sliceOf60FPS = Math.round((meanMsPerOp / (1000/60)) * 100000)/1000;
+            return `${str} (avg: ${meanMsPerOp}ms, or ${sliceOf60FPS}% of a frame @ 60FPS) ${x.suffix || ''}`;
+        }).join('\n') + '\n');
+        if (suites.length > 0) {
+            setTimeout(run, 1000);
+        }
+    })
+    .run({ async: true });
+}
+
+function createFromTableTest(name, buffers) {
+    let table;
+    return {
+        async: true,
+        name: `Table.from`,
+        fn() { table = Table.from(...buffers); }
+    };
+}
+
+function createReadBuffersTest(name, buffers) {
+    let vectors;
+    return {
+        async: true,
+        name: `readBuffers`,
+        fn() { for (vectors of readBuffers(...buffers)) {} }
+    };
+}
+
+function createSliceTest(vector) {
+    let xs;
+    return {
+        async: true,
+        name: `name: '${vector.name}', length: ${vector.length}, type: ${vector.type}`,
+        fn() { xs = vector.slice(); }
+    };
+}
+
+function createIterateTest(vector) {
+    let value;
+    return {
+        async: true,
+        name: `name: '${vector.name}', length: ${vector.length}, type: ${vector.type}`,
+        fn() { for (value of vector) {} }
+    };
+}
+
+function createGetByIndexTest(vector) {
+    let value;
+    return {
+        async: true,
+        name: `name: '${vector.name}', length: ${vector.length}, type: ${vector.type}`,
+        fn() {
+            for (let i = -1, n = vector.length; ++i < n;) {
+                value = vector.get(i);
+            }
+        }
+    };
+}
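
For reference, a minimal sketch of the API surface these suites exercise, written against the same ES5 UMD build. It is not part of the commit: the real benchmarks load their buffers via ./config, and the file path and single-buffer call here are assumptions.

// Editor's sketch: mirrors the parse, iterate, and get-by-index paths
// timed by the suites above, assuming one Arrow file read from disk.
const fs = require('fs');
const { Table } = require('../dist/Arrow');

const buffer = fs.readFileSync('sample.arrow');      // hypothetical fixture path
const table = Table.from(buffer);                    // same call as createFromTableTest
for (const vector of table.cols()) {
    console.log(vector.name, vector.type, vector.length);
    for (let i = -1, n = vector.length; ++i < n;) {  // get-by-index, as in createGetByIndexTest
        vector.get(i);
    }
}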

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/prepublish.sh
----------------------------------------------------------------------
diff --git a/js/prepublish.sh b/js/prepublish.sh
new file mode 100644
index 0000000..4ad8db1
--- /dev/null
+++ b/js/prepublish.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+npm run lint
+npm run build
+npm run test
+preset=`conventional-commits-detector` && echo $preset
+bump=`conventional-recommended-bump -p $preset` && echo $bump
+npm --no-git-tag-version version $bump &>/dev/null
+npm run lerna:publish
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/spec/arrow.js
----------------------------------------------------------------------
diff --git a/js/spec/arrow.js b/js/spec/arrow.js
deleted file mode 100644
index 52c586b..0000000
--- a/js/spec/arrow.js
+++ /dev/null
@@ -1,179 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-var fs = require('fs');
-var chai = require('chai');
-var assert = chai.assert;
-var path= require('path');
-var arrow = require('../lib/arrow.js');
-
-test_files = [
-  {
-    name: 'simple',
-    batches: 1,
-    fields: [
-      {
-        "name": "foo",
-        "type": "Int",
-        "data": [[1, null, 3, 4, 5]]
-      },
-      {
-        "name": "bar",
-        "type": "FloatingPoint",
-        "data": [[1.0, null, null, 4.0, 5.0]]
-      },
-      {
-        "name": "baz",
-        "type": "Utf8",
-        "data": [["aa", null, null, "bbb", "cccc"]]
-      }
-    ]
-  },
-  {
-    name: 'struct_example',
-    batches: 2,
-    fields: [
-      {
-        "name": "struct_nullable",
-        "type": "Struct",
-        "data": [
-          [
-            null,
-            [null, 'MhRNxD4'],
-            [137773603, '3F9HBxK'],
-            [410361374, 'aVd88fp'],
-            null,
-            [null, '3loZrRf'],
-            null
-          ], [
-            null,
-            [null,null],
-            [null,null],
-            null,
-            [null, '78SLiRw'],
-            null,
-            null,
-            [null, '0ilsf82'],
-            [null, 'LjS9MbU'],
-            [null, null],
-          ]
-        ]
-      }
-    ]
-  },
-  {
-    name: 'dictionary',
-    batches: 2,
-    fields: [
-      {
-        "name": "example-csv",
-        "type": "Struct",
-        "data": [
-          [
-            ["Hermione", 25, new Float32Array([-53.235599517822266, 40.231998443603516])],
-            ["Severus", 30, new Float32Array([-62.22999954223633, 3])],
-          ], [
-            ["Harry", 20, new Float32Array([23, -100.23652648925781])]
-          ]
-        ]
-      }
-    ]
-  },
-];
-
-var buf;
-
-function makeSchemaChecks(fields) {
-  describe('schema', function () {
-    var schema;
-    beforeEach(function () {
-      schema = arrow.getSchema(buf);
-    });
-
-    it('should read the number of fields', function () {
-        assert.lengthOf(schema, fields.length);
-    });
-
-    it("should understand fields", function () {
-      for (i = 0; i < fields.length; i += 1|0) {
-          assert.equal(schema[i].name, fields[i].name);
-          assert.equal(schema[i].type, fields[i].type,
-                       'bad type for field ' + schema[i].name);
-      }
-    });
-  });
-}
-
-function makeDataChecks (batches, fields) {
-  describe('data', function() {
-    var reader;
-    beforeEach(function () {
-        reader = arrow.getReader(buf)
-    });
-    it('should read the correct number of record batches', function () {
-        assert.equal(reader.getBatchCount(), batches);
-    });
-    fields.forEach(function (field, i) {
-      it('should read ' + field.type + ' vector ' + field.name, function () {
-        for (var batch_idx = 0; batch_idx < batches; batch_idx += 1|0) {
-          reader.loadNextBatch();
-          var batch = field.data[batch_idx];
-          var vector = reader.getVector(field.name)
-          assert.isDefined(vector, "vector " + field.name);
-          assert.lengthOf(vector, batch.length, "vector " + field.name)
-          for (i = 0; i < vector.length; i += 1|0) {
-            if (field.type == "Date") {
-              assert.equal(vector.get(i).getTime(), batch[i].getTime(),
-                           "vector " + field.name + " index " + i);
-            } else {
-              assert.deepEqual(vector.get(i), batch[i],
-                               "vector " + field.name + " index " + i);
-            }
-          }
-        }
-      });
-    });
-  });
-}
-
-describe('arrow random-access file', function () {
-  test_files.forEach(function (test_file) {
-    describe(test_file.name, function () {
-      var fields = test_file.fields
-      beforeEach(function () {
-        buf = fs.readFileSync(path.resolve(__dirname, test_file.name + '.arrow'));
-      });
-
-      makeSchemaChecks(fields);
-      makeDataChecks(test_file.batches, fields);
-    })
-  });
-});
-
-describe('arrow streaming file format', function () {
-  test_files.forEach(function (test_file) {
-    describe(test_file.name, function () {
-      var fields = test_file.fields
-      beforeEach(function () {
-        buf = fs.readFileSync(path.resolve(__dirname, test_file.name + '-stream.arrow'));
-      });
-
-      makeSchemaChecks(fields);
-      makeDataChecks(test_file.batches, fields);
-    })
-  });
-});

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/spec/dictionary-stream.arrow
----------------------------------------------------------------------
diff --git a/js/spec/dictionary-stream.arrow b/js/spec/dictionary-stream.arrow
deleted file mode 100644
index 17ca48b..0000000
Binary files a/js/spec/dictionary-stream.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/spec/dictionary.arrow
----------------------------------------------------------------------
diff --git a/js/spec/dictionary.arrow b/js/spec/dictionary.arrow
deleted file mode 100644
index 34d41db..0000000
Binary files a/js/spec/dictionary.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/spec/simple-stream.arrow
----------------------------------------------------------------------
diff --git a/js/spec/simple-stream.arrow b/js/spec/simple-stream.arrow
deleted file mode 100644
index 2c68c0e..0000000
Binary files a/js/spec/simple-stream.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/spec/simple.arrow
----------------------------------------------------------------------
diff --git a/js/spec/simple.arrow b/js/spec/simple.arrow
deleted file mode 100644
index 838db6d..0000000
Binary files a/js/spec/simple.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/spec/struct_example-stream.arrow
----------------------------------------------------------------------
diff --git a/js/spec/struct_example-stream.arrow b/js/spec/struct_example-stream.arrow
deleted file mode 100644
index 4e97b70..0000000
Binary files a/js/spec/struct_example-stream.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/spec/struct_example.arrow
----------------------------------------------------------------------
diff --git a/js/spec/struct_example.arrow b/js/spec/struct_example.arrow
deleted file mode 100644
index 3d2c018..0000000
Binary files a/js/spec/struct_example.arrow and /dev/null differ

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/src/Arrow.externs.ts
----------------------------------------------------------------------
diff --git a/js/src/Arrow.externs.ts b/js/src/Arrow.externs.ts
new file mode 100644
index 0000000..7342684
--- /dev/null
+++ b/js/src/Arrow.externs.ts
@@ -0,0 +1,67 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+/**
+ * @fileoverview Closure Compiler externs for Arrow
+ * @externs
+ * @suppress {duplicate,checkTypes}
+ */
+/** @type {symbol} */
+Symbol.iterator;
+/** @type {symbol} */
+Symbol.asyncIterator;
+let Table = function() {};
+/** @type {?} */
+Table.prototype.rows;
+/** @type {?} */
+Table.prototype.cols;
+/** @type {?} */
+Table.prototype.getRow;
+/** @type {?} */
+Table.prototype.getCell;
+/** @type {?} */
+Table.prototype.getCellAt;
+/** @type {?} */
+Table.prototype.getColumn;
+/** @type {?} */
+Table.prototype.getColumnAt;
+/** @type {?} */
+Table.prototype.toString;
+
+let Vector = function() {};
+/** @type {?} */
+Vector.prototype.length;
+/** @type {?} */
+Vector.prototype.name;
+/** @type {?} */
+Vector.prototype.type;
+/** @type {?} */
+Vector.prototype.props;
+/** @type {?} */
+Vector.prototype.get;
+/** @type {?} */
+Vector.prototype.concat;
+/** @type {?} */
+Vector.prototype.slice;
+
+let TypedVector = function() {};
+/** @type {?} */
+TypedVector.prototype.arrayType;
+
+let ValidityVector = function() {};
+/** @type {?} */
+(<any> ValidityVector).pack;

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/src/Arrow.internal.ts
----------------------------------------------------------------------
diff --git a/js/src/Arrow.internal.ts b/js/src/Arrow.internal.ts
new file mode 100644
index 0000000..d8f0c37
--- /dev/null
+++ b/js/src/Arrow.internal.ts
@@ -0,0 +1,105 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Vector as Vector_ } from './vector/vector';
+import { StructVector as StructVector_ } from './vector/struct';
+import { DictionaryVector as DictionaryVector_ } from './vector/dictionary';
+import { ListVector as ListVector_, Utf8Vector as Utf8Vector_, FixedSizeListVector as FixedSizeListVector_ } from './vector/list';
+import {
+    TypedVector as TypedVector_, BitVector as BitVector_,
+    DateVector as DateVector_, IndexVector as IndexVector_,
+    Int8Vector as Int8Vector_, Int16Vector as Int16Vector_,
+    Int32Vector as Int32Vector_, Int64Vector as Int64Vector_,
+    Uint8Vector as Uint8Vector_, Uint16Vector as Uint16Vector_,
+    Uint32Vector as Uint32Vector_, Uint64Vector as Uint64Vector_,
+    Float32Vector as Float32Vector_, Float64Vector as Float64Vector_,
+} from './vector/typed';
+
+export const vectors = {
+    Vector: Vector_,
+    BitVector: BitVector_,
+    ListVector: ListVector_,
+    Utf8Vector: Utf8Vector_,
+    DateVector: DateVector_,
+    IndexVector: IndexVector_,
+    TypedVector: TypedVector_,
+    Int8Vector: Int8Vector_,
+    Int16Vector: Int16Vector_,
+    Int32Vector: Int32Vector_,
+    Int64Vector: Int64Vector_,
+    Uint8Vector: Uint8Vector_,
+    Uint16Vector: Uint16Vector_,
+    Uint32Vector: Uint32Vector_,
+    Uint64Vector: Uint64Vector_,
+    Float32Vector: Float32Vector_,
+    Float64Vector: Float64Vector_,
+    StructVector: StructVector_,
+    DictionaryVector: DictionaryVector_,
+    FixedSizeListVector: FixedSizeListVector_,
+};
+
+export namespace vectors {
+    export type Vector<T> =  Vector_<T>;
+    export type BitVector =  BitVector_;
+    export type ListVector<T> =  ListVector_<T>;
+    export type Utf8Vector =  Utf8Vector_;
+    export type DateVector =  DateVector_;
+    export type IndexVector =  IndexVector_;
+    export type Int8Vector =  Int8Vector_;
+    export type Int16Vector =  Int16Vector_;
+    export type Int32Vector =  Int32Vector_;
+    export type Int64Vector =  Int64Vector_;
+    export type Uint8Vector =  Uint8Vector_;
+    export type Uint16Vector =  Uint16Vector_;
+    export type Uint32Vector =  Uint32Vector_;
+    export type Uint64Vector =  Uint64Vector_;
+    export type Float32Vector =  Float32Vector_;
+    export type Float64Vector =  Float64Vector_;
+    export type StructVector =  StructVector_;
+    export type DictionaryVector<T> =  DictionaryVector_<T>;
+    export type FixedSizeListVector<T> =  FixedSizeListVector_<T>;
+    export type TypedVector<T, TArray> =  TypedVector_<T, TArray>;
+}
+
+/* These exports are needed for the closure umd targets */
+try {
+    const Arrow = eval('exports');
+    if (typeof Arrow === 'object') {
+        // string indexers tell closure compiler not to rename these properties
+        Arrow['vectors'] = {};
+        Arrow['vectors']['Vector'] = Vector_;
+        Arrow['vectors']['BitVector'] = BitVector_;
+        Arrow['vectors']['ListVector'] = ListVector_;
+        Arrow['vectors']['Utf8Vector'] = Utf8Vector_;
+        Arrow['vectors']['DateVector'] = DateVector_;
+        Arrow['vectors']['IndexVector'] = IndexVector_;
+        Arrow['vectors']['Int8Vector'] = Int8Vector_;
+        Arrow['vectors']['Int16Vector'] = Int16Vector_;
+        Arrow['vectors']['Int32Vector'] = Int32Vector_;
+        Arrow['vectors']['Int64Vector'] = Int64Vector_;
+        Arrow['vectors']['Uint8Vector'] = Uint8Vector_;
+        Arrow['vectors']['Uint16Vector'] = Uint16Vector_;
+        Arrow['vectors']['Uint32Vector'] = Uint32Vector_;
+        Arrow['vectors']['Uint64Vector'] = Uint64Vector_;
+        Arrow['vectors']['Float32Vector'] = Float32Vector_;
+        Arrow['vectors']['Float64Vector'] = Float64Vector_;
+        Arrow['vectors']['StructVector'] = StructVector_;
+        Arrow['vectors']['DictionaryVector'] = DictionaryVector_;
+        Arrow['vectors']['FixedSizeListVector'] = FixedSizeListVector_;
+    }
+} catch (e) { /* not the UMD bundle */ }
+/** end closure exports */

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/src/Arrow.ts
----------------------------------------------------------------------
diff --git a/js/src/Arrow.ts b/js/src/Arrow.ts
new file mode 100644
index 0000000..fe19645
--- /dev/null
+++ b/js/src/Arrow.ts
@@ -0,0 +1,31 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { Table } from './table';
+import { readBuffers } from './reader/arrow';
+export { Table, readBuffers };
+
+/* These exports are needed for the closure umd targets */
+try {
+    const Arrow = eval('exports');
+    if (typeof Arrow === 'object') {
+        // string indexers tell closure compiler not to rename these properties
+        Arrow['Table'] = Table;
+        Arrow['readBuffers'] = readBuffers;
+    }
+} catch (e) { /* not the UMD bundle */ }
+/** end closure exports */
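
As a rough usage sketch of these two exports, based on how js/perf/index.js above consumes them; the input bytes and the helper that produces them are assumptions, not part of this patch.

// Editor's sketch: Table.from materializes a whole table, while the
// readBuffers generator yields the vectors for each record batch
// (the same iteration as createReadBuffersTest above).
const { Table, readBuffers } = require('./Arrow');    // assumed: the compiled module

const bytes = getArrowBytes();                        // hypothetical helper returning a Uint8Array
for (const vectors of readBuffers(bytes)) {
    console.log(vectors.length);                      // vectors for one record batch
}
const table = Table.from(bytes);
console.log(table.getColumnAt(0));                    // getColumnAt is listed in Arrow.externs.ts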

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/src/Arrow_generated.d.ts
----------------------------------------------------------------------
diff --git a/js/src/Arrow_generated.d.ts b/js/src/Arrow_generated.d.ts
deleted file mode 100644
index 1f5b454..0000000
--- a/js/src/Arrow_generated.d.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export var org: {
-  apache: {
-    arrow: any
-  }
-}

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/src/arrow.ts
----------------------------------------------------------------------
diff --git a/js/src/arrow.ts b/js/src/arrow.ts
deleted file mode 100644
index ac87a8c..0000000
--- a/js/src/arrow.ts
+++ /dev/null
@@ -1,515 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import { flatbuffers } from "flatbuffers";
-import { org } from "./Arrow_generated";
-import { Vector, vectorFromField } from "./types";
-
-import ByteBuffer = flatbuffers.ByteBuffer;
-const Footer = org.apache.arrow.flatbuf.Footer;
-const Message = org.apache.arrow.flatbuf.Message;
-const MessageHeader = org.apache.arrow.flatbuf.MessageHeader;
-const RecordBatch = org.apache.arrow.flatbuf.RecordBatch;
-const DictionaryBatch = org.apache.arrow.flatbuf.DictionaryBatch;
-const Schema = org.apache.arrow.flatbuf.Schema;
-const Type = org.apache.arrow.flatbuf.Type;
-const VectorType = org.apache.arrow.flatbuf.VectorType;
-
-export class ArrowReader {
-
-    private bb;
-    private schema: any = [];
-    private vectors: Vector[];
-    private vectorMap: any = {};
-    private dictionaries: any = {};
-    private batches: any = [];
-    private batchIndex: number = 0;
-
-    constructor(bb, schema, vectors: Vector[], batches, dictionaries) {
-        this.bb = bb;
-        this.schema = schema;
-        this.vectors = vectors;
-        for (let i = 0; i < vectors.length; i++) {
-            this.vectorMap[vectors[i].name] = vectors[i];
-        }
-        this.batches = batches;
-        this.dictionaries = dictionaries;
-    }
-
-    public loadNextBatch() {
-        if (this.batchIndex < this.batches.length) {
-            const batch = this.batches[this.batchIndex];
-            this.batchIndex += 1;
-            loadVectors(this.bb, this.vectors, batch);
-            return batch.length;
-        } else {
-            return 0;
-        }
-    }
-
-    public getSchema() {
-        return this.schema;
-    }
-
-    public getVectors() {
-        return this.vectors;
-    }
-
-    public getVector(name) {
-        return this.vectorMap[name];
-    }
-
-    public getBatchCount() {
-        return this.batches.length;
-    }
-
-    // the index of the next batch to be loaded
-    public getBatchIndex() {
-        return this.batchIndex;
-    }
-
-    // set the index of the next batch to be loaded
-    public setBatchIndex(i: number) {
-        this.batchIndex = i;
-    }
-}
-
-export function getSchema(buf) { return getReader(buf).getSchema(); }
-
-export function getReader(buf): ArrowReader {
-    if (_checkMagic(buf, 0)) {
-        return getFileReader(buf);
-    } else {
-        return getStreamReader(buf);
-    }
-}
-
-export function getStreamReader(buf): ArrowReader {
-    const bb = new ByteBuffer(buf);
-
-    const schema = _loadSchema(bb);
-    let field;
-    const vectors: Vector[] = [];
-    let i;
-    let iLen;
-    let batch;
-    const recordBatches = [];
-    const dictionaryBatches = [];
-    const dictionaries = {};
-
-    for (i = 0, iLen = schema.fieldsLength(); i < iLen; i++) {
-        field = schema.fields(i);
-        _createDictionaryVectors(field, dictionaries);
-        vectors.push(vectorFromField(field, dictionaries));
-    }
-
-    while (bb.position() < bb.capacity()) {
-      batch = _loadBatch(bb);
-      if (batch == null) {
-          break;
-      } else if (batch.type === MessageHeader.DictionaryBatch) {
-          dictionaryBatches.push(batch);
-      } else if (batch.type === MessageHeader.RecordBatch) {
-          recordBatches.push(batch);
-      } else {
-          throw new Error("Expected batch type" + MessageHeader.RecordBatch + " or " +
-              MessageHeader.DictionaryBatch + " but got " + batch.type);
-      }
-    }
-
-    // load dictionary vectors
-    for (i = 0; i < dictionaryBatches.length; i++) {
-      batch = dictionaryBatches[i];
-      loadVectors(bb, [dictionaries[batch.id]], batch);
-    }
-
-    return new ArrowReader(bb, parseSchema(schema), vectors, recordBatches, dictionaries);
-}
-
-export function getFileReader(buf): ArrowReader {
-    const bb = new ByteBuffer(buf);
-
-    const footer = _loadFooter(bb);
-
-    const schema = footer.schema();
-    let i;
-    let len;
-    let field;
-    const vectors: Vector[] = [];
-    let block;
-    let batch;
-    const recordBatchBlocks = [];
-    const dictionaryBatchBlocks = [];
-    const dictionaries = {};
-
-    for (i = 0, len = schema.fieldsLength(); i < len; i++) {
-        field = schema.fields(i);
-        _createDictionaryVectors(field, dictionaries);
-        vectors.push(vectorFromField(field, dictionaries));
-    }
-
-    for (i = 0; i < footer.dictionariesLength(); i++) {
-        block = footer.dictionaries(i);
-        dictionaryBatchBlocks.push({
-            bodyLength: block.bodyLength().low,
-            metaDataLength: block.metaDataLength(),
-            offset: block.offset().low,
-        });
-    }
-
-    for (i = 0; i < footer.recordBatchesLength(); i++) {
-        block = footer.recordBatches(i);
-        recordBatchBlocks.push({
-            bodyLength: block.bodyLength().low,
-            metaDataLength: block.metaDataLength(),
-            offset: block.offset().low,
-        });
-    }
-
-    const dictionaryBatches = dictionaryBatchBlocks.map((batchBlock) => {
-        bb.setPosition(batchBlock.offset);
-        // TODO: Make sure this is a dictionary batch
-        return _loadBatch(bb);
-    });
-
-    const recordBatches = recordBatchBlocks.map((batchBlock) => {
-        bb.setPosition(batchBlock.offset);
-        // TODO: Make sure this is a record batch
-        return _loadBatch(bb);
-    });
-
-    // load dictionary vectors
-    for (i = 0; i < dictionaryBatches.length; i++) {
-        batch = dictionaryBatches[i];
-        loadVectors(bb, [dictionaries[batch.id]], batch);
-    }
-
-    return new ArrowReader(bb, parseSchema(schema), vectors, recordBatches, dictionaries);
-}
-
-function _loadFooter(bb) {
-    const fileLength: number = bb.bytes_.length;
-
-    if (fileLength < MAGIC.length * 2 + 4) {
-      throw new Error("file too small " + fileLength);
-    }
-
-    if (!_checkMagic(bb.bytes_, 0)) {
-      throw new Error("missing magic bytes at beginning of file");
-    }
-
-    if (!_checkMagic(bb.bytes_, fileLength - MAGIC.length)) {
-      throw new Error("missing magic bytes at end of file");
-    }
-
-    const footerLengthOffset: number = fileLength - MAGIC.length - 4;
-    bb.setPosition(footerLengthOffset);
-    const footerLength: number = Int32FromByteBuffer(bb, footerLengthOffset);
-
-    if (footerLength <= 0 || footerLength + MAGIC.length * 2 + 4 > fileLength)  {
-      throw new Error("Invalid footer length: " + footerLength);
-    }
-
-    const footerOffset: number = footerLengthOffset - footerLength;
-    bb.setPosition(footerOffset);
-    const footer = Footer.getRootAsFooter(bb);
-
-    return footer;
-}
-
-function _loadSchema(bb) {
-    const message = _loadMessage(bb);
-    if (message.headerType() !== MessageHeader.Schema) {
-        throw new Error("Expected header type " + MessageHeader.Schema + " but got " + message.headerType());
-    }
-    return message.header(new Schema());
-}
-
-function _loadBatch(bb) {
-    const message = _loadMessage(bb);
-    if (message == null) {
-        return;
-    } else if (message.headerType() === MessageHeader.RecordBatch) {
-        const batch = { header: message.header(new RecordBatch()), length: message.bodyLength().low };
-        return _loadRecordBatch(bb, batch);
-    } else if (message.headerType() === MessageHeader.DictionaryBatch) {
-        const batch = { header: message.header(new DictionaryBatch()), length: message.bodyLength().low };
-        return _loadDictionaryBatch(bb, batch);
-    } else {
-        throw new Error("Expected header type " + MessageHeader.RecordBatch + " or " + MessageHeader.DictionaryBatch +
-            " but got " + message.headerType());
-    }
-}
-
-function _loadRecordBatch(bb, batch) {
-    const data = batch.header;
-    let i;
-    const nodesLength = data.nodesLength();
-    const nodes = new Array(nodesLength);
-    let buffer;
-    const buffersLength = data.buffersLength();
-    const buffers = new Array(buffersLength);
-
-    for (i = 0; i < nodesLength; i += 1) {
-        nodes[i] = data.nodes(i);
-    }
-
-    for (i = 0; i < buffersLength; i += 1) {
-        buffer = data.buffers(i);
-        buffers[i] = {
-            length: buffer.length().low,
-            offset: bb.position() + buffer.offset().low,
-        };
-    }
-    // position the buffer after the body to read the next message
-    bb.setPosition(bb.position() + batch.length);
-
-    return { nodes, buffers, length: data.length().low, type: MessageHeader.RecordBatch };
-}
-
-function _loadDictionaryBatch(bb, batch) {
-    const id = batch.header.id().toFloat64().toString();
-    const data = batch.header.data();
-    let i;
-    const nodesLength = data.nodesLength();
-    const nodes = new Array(nodesLength);
-    let buffer;
-    const buffersLength = data.buffersLength();
-    const buffers = new Array(buffersLength);
-
-    for (i = 0; i < nodesLength; i += 1) {
-        nodes[i] = data.nodes(i);
-    }
-    for (i = 0; i < buffersLength; i += 1) {
-        buffer = data.buffers(i);
-        buffers[i] = {
-            length: buffer.length().low,
-            offset: bb.position() + buffer.offset().low,
-        };
-    }
-    // position the buffer after the body to read the next message
-    bb.setPosition(bb.position() + batch.length);
-
-    return {
-        buffers,
-        id,
-        length: data.length().low,
-        nodes,
-        type: MessageHeader.DictionaryBatch,
-    };
-}
-
-function _loadMessage(bb) {
-    const messageLength: number = Int32FromByteBuffer(bb, bb.position());
-    if (messageLength === 0) {
-      return;
-    }
-    bb.setPosition(bb.position() + 4);
-    const message = Message.getRootAsMessage(bb);
-    // position the buffer at the end of the message so it's ready to read further
-    bb.setPosition(bb.position() + messageLength);
-
-    return message;
-}
-
-function _createDictionaryVectors(field, dictionaries) {
-    const encoding = field.dictionary();
-    if (encoding != null) {
-        const id = encoding.id().toFloat64().toString();
-        if (dictionaries[id] == null) {
-            // create a field for the dictionary
-            const dictionaryField = _createDictionaryField(id, field);
-            dictionaries[id] = vectorFromField(dictionaryField, null);
-        }
-    }
-
-    // recursively examine child fields
-    for (let i = 0, len = field.childrenLength(); i < len; i++) {
-        _createDictionaryVectors(field.children(i), dictionaries);
-    }
-}
-
-function _createDictionaryField(id, field) {
-    const builder = new flatbuffers.Builder();
-    const nameOffset = builder.createString("dict-" + id);
-
-    const typeType = field.typeType();
-    let typeOffset;
-    if (typeType === Type.Int) {
-        const type = field.type(new org.apache.arrow.flatbuf.Int());
-        org.apache.arrow.flatbuf.Int.startInt(builder);
-        org.apache.arrow.flatbuf.Int.addBitWidth(builder, type.bitWidth());
-        org.apache.arrow.flatbuf.Int.addIsSigned(builder, type.isSigned());
-        typeOffset = org.apache.arrow.flatbuf.Int.endInt(builder);
-    } else if (typeType === Type.FloatingPoint) {
-        const type = field.type(new org.apache.arrow.flatbuf.FloatingPoint());
-        org.apache.arrow.flatbuf.FloatingPoint.startFloatingPoint(builder);
-        org.apache.arrow.flatbuf.FloatingPoint.addPrecision(builder, type.precision());
-        typeOffset = org.apache.arrow.flatbuf.FloatingPoint.endFloatingPoint(builder);
-    } else if (typeType === Type.Utf8) {
-        org.apache.arrow.flatbuf.Utf8.startUtf8(builder);
-        typeOffset = org.apache.arrow.flatbuf.Utf8.endUtf8(builder);
-    } else if (typeType === Type.Date) {
-        const type = field.type(new org.apache.arrow.flatbuf.Date());
-        org.apache.arrow.flatbuf.Date.startDate(builder);
-        org.apache.arrow.flatbuf.Date.addUnit(builder, type.unit());
-        typeOffset = org.apache.arrow.flatbuf.Date.endDate(builder);
-    } else {
-        throw new Error("Unimplemented dictionary type " + typeType);
-    }
-    if (field.childrenLength() > 0) {
-      throw new Error("Dictionary encoded fields can't have children");
-    }
-    const childrenOffset = org.apache.arrow.flatbuf.Field.createChildrenVector(builder, []);
-
-    let layout;
-    const layoutOffsets = [];
-    for (let i = 0, len = field.layoutLength(); i < len; i++) {
-        layout = field.layout(i);
-        org.apache.arrow.flatbuf.VectorLayout.startVectorLayout(builder);
-        org.apache.arrow.flatbuf.VectorLayout.addBitWidth(builder, layout.bitWidth());
-        org.apache.arrow.flatbuf.VectorLayout.addType(builder, layout.type());
-        layoutOffsets.push(org.apache.arrow.flatbuf.VectorLayout.endVectorLayout(builder));
-    }
-    const layoutOffset = org.apache.arrow.flatbuf.Field.createLayoutVector(builder, layoutOffsets);
-
-    org.apache.arrow.flatbuf.Field.startField(builder);
-    org.apache.arrow.flatbuf.Field.addName(builder, nameOffset);
-    org.apache.arrow.flatbuf.Field.addNullable(builder, field.nullable());
-    org.apache.arrow.flatbuf.Field.addTypeType(builder, typeType);
-    org.apache.arrow.flatbuf.Field.addType(builder, typeOffset);
-    org.apache.arrow.flatbuf.Field.addChildren(builder, childrenOffset);
-    org.apache.arrow.flatbuf.Field.addLayout(builder, layoutOffset);
-    const offset = org.apache.arrow.flatbuf.Field.endField(builder);
-    builder.finish(offset);
-
-    return org.apache.arrow.flatbuf.Field.getRootAsField(builder.bb);
-}
-
-function Int32FromByteBuffer(bb, offset) {
-    return ((bb.bytes_[offset + 3] & 255) << 24) |
-           ((bb.bytes_[offset + 2] & 255) << 16) |
-           ((bb.bytes_[offset + 1] & 255) << 8) |
-           ((bb.bytes_[offset] & 255));
-}
-
-const MAGIC_STR = "ARROW1";
-const MAGIC = new Uint8Array(MAGIC_STR.length);
-for (let i = 0; i < MAGIC_STR.length; i++) {
-    MAGIC[i] = MAGIC_STR.charCodeAt(i);
-}
-
-function _checkMagic(buf, index) {
-    for (let i = 0; i < MAGIC.length; i++) {
-        if (MAGIC[i] !== buf[index + i]) {
-            return false;
-        }
-    }
-    return true;
-}
-
-const TYPEMAP = {};
-TYPEMAP[Type.NONE]          = "NONE";
-TYPEMAP[Type.Null]          = "Null";
-TYPEMAP[Type.Int]           = "Int";
-TYPEMAP[Type.FloatingPoint] = "FloatingPoint";
-TYPEMAP[Type.Binary]        = "Binary";
-TYPEMAP[Type.Utf8]          = "Utf8";
-TYPEMAP[Type.Bool]          = "Bool";
-TYPEMAP[Type.Decimal]       = "Decimal";
-TYPEMAP[Type.Date]          = "Date";
-TYPEMAP[Type.Time]          = "Time";
-TYPEMAP[Type.Timestamp]     = "Timestamp";
-TYPEMAP[Type.Interval]      = "Interval";
-TYPEMAP[Type.List]          = "List";
-TYPEMAP[Type.FixedSizeList] = "FixedSizeList";
-TYPEMAP[Type.Struct_]       = "Struct";
-TYPEMAP[Type.Union]         = "Union";
-
-const VECTORTYPEMAP = {};
-VECTORTYPEMAP[VectorType.OFFSET]   = "OFFSET";
-VECTORTYPEMAP[VectorType.DATA]     = "DATA";
-VECTORTYPEMAP[VectorType.VALIDITY] = "VALIDITY";
-VECTORTYPEMAP[VectorType.TYPE]     = "TYPE";
-
-function parseField(field) {
-    const children = [];
-    for (let i = 0; i < field.childrenLength(); i++) {
-        children.push(parseField(field.children(i)));
-    }
-
-    const layouts = [];
-    for (let i = 0; i < field.layoutLength(); i++) {
-        layouts.push(VECTORTYPEMAP[field.layout(i).type()]);
-    }
-
-    return {
-      children,
-      layout: layouts,
-      name: field.name(),
-      nullable: field.nullable(),
-      type: TYPEMAP[field.typeType()],
-    };
-}
-
-function parseSchema(schema) {
-    const result = [];
-    for (let i = 0, len = schema.fieldsLength(); i < len; i++) {
-        result.push(parseField(schema.fields(i)));
-    }
-    return result;
-}
-
-function loadVectors(bb, vectors: Vector[], recordBatch) {
-    const indices = { bufferIndex: 0, nodeIndex: 0 };
-    for (const vector of vectors) {
-        loadVector(bb, vector, recordBatch, indices);
-    }
-}
-
-/**
- * Loads a vector with data from a batch
- *   recordBatch: { nodes: org.apache.arrow.flatbuf.FieldNode[], buffers: { offset: number, length: number }[] }
- */
-function loadVector(bb, vector: Vector, recordBatch, indices) {
-    const node = recordBatch.nodes[indices.nodeIndex];
-    let ownBuffersLength;
-    const ownBuffers = [];
-    let i;
-    indices.nodeIndex += 1;
-
-    // dictionary vectors are always ints, so will have a data vector plus optional null vector
-    if (vector.field.dictionary() == null) {
-        ownBuffersLength = vector.field.layoutLength();
-    } else if (vector.field.nullable()) {
-        ownBuffersLength = 2;
-    } else {
-        ownBuffersLength = 1;
-    }
-
-    for (i = 0; i < ownBuffersLength; i += 1) {
-        ownBuffers.push(recordBatch.buffers[indices.bufferIndex + i]);
-    }
-    indices.bufferIndex += ownBuffersLength;
-
-    vector.loadData(bb, node, ownBuffers);
-
-    const children = vector.getChildVectors();
-    for (i = 0; i < children.length; i++) {
-        loadVector(bb, children[i], recordBatch, indices);
-    }
-}

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/src/bitarray.ts
----------------------------------------------------------------------
diff --git a/js/src/bitarray.ts b/js/src/bitarray.ts
deleted file mode 100644
index 6b0a91a..0000000
--- a/js/src/bitarray.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-export class BitArray {
-    private view: Uint8Array;
-
-    constructor(buffer: ArrayBuffer, offset: number, length: number) {
-        this.view = new Uint8Array(buffer, offset || 0, Math.ceil(length / 8));
-    }
-
-    public get(i) {
-        const index = (i >> 3) | 0; // | 0 converts to an int. Math.floor works too.
-        const bit = i % 8;  // i % 8 is just as fast as i & 7
-        return (this.view[index] & (1 << bit)) !== 0;
-    }
-
-    public set(i) {
-        const index = (i >> 3) | 0;
-        const bit = i % 8;
-        this.view[index] |= 1 << bit;
-    }
-
-    public unset(i) {
-        const index = (i >> 3) | 0;
-        const bit = i % 8;
-        this.view[index] &= ~(1 << bit);
-    }
-}

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/src/format/File_generated.ts
----------------------------------------------------------------------
diff --git a/js/src/format/File_generated.ts b/js/src/format/File_generated.ts
new file mode 100644
index 0000000..d0b774a
--- /dev/null
+++ b/js/src/format/File_generated.ts
@@ -0,0 +1,240 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+import { flatbuffers } from 'flatbuffers';
+import * as NS16187549871986683199 from './Schema_generated';
+/**
+ * ----------------------------------------------------------------------
+ * Arrow File metadata
+ *
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Footer {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Footer}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Footer {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Footer=} obj
+     * @returns {Footer}
+     */
+    static getRootAsFooter(bb: flatbuffers.ByteBuffer, obj?: Footer): Footer {
+      return (obj || new Footer).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.MetadataVersion}
+     */
+    version(): NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.MetadataVersion} */ (this.bb.readInt16(this.bb_pos + offset)) : NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion.V1;
+    }
+
+    /**
+     * @param {org.apache.arrow.flatbuf.Schema=} obj
+     * @returns {org.apache.arrow.flatbuf.Schema|null}
+     */
+    schema(obj?: NS16187549871986683199.org.apache.arrow.flatbuf.Schema): NS16187549871986683199.org.apache.arrow.flatbuf.Schema | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? (obj || new NS16187549871986683199.org.apache.arrow.flatbuf.Schema).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+    }
+
+    /**
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.Block=} obj
+     * @returns {org.apache.arrow.flatbuf.Block}
+     */
+    dictionaries(index: number, obj?: org.apache.arrow.flatbuf.Block): org.apache.arrow.flatbuf.Block | null {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? (obj || new org.apache.arrow.flatbuf.Block).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    dictionariesLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.Block=} obj
+     * @returns {org.apache.arrow.flatbuf.Block}
+     */
+    recordBatches(index: number, obj?: org.apache.arrow.flatbuf.Block): org.apache.arrow.flatbuf.Block | null {
+      let offset = this.bb.__offset(this.bb_pos, 10);
+      return offset ? (obj || new org.apache.arrow.flatbuf.Block).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    recordBatchesLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 10);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startFooter(builder: flatbuffers.Builder) {
+      builder.startObject(4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.MetadataVersion} version
+     */
+    static addVersion(builder: flatbuffers.Builder, version: NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion) {
+      builder.addFieldInt16(0, version, NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion.V1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} schemaOffset
+     */
+    static addSchema(builder: flatbuffers.Builder, schemaOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, schemaOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} dictionariesOffset
+     */
+    static addDictionaries(builder: flatbuffers.Builder, dictionariesOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(2, dictionariesOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startDictionariesVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(24, numElems, 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} recordBatchesOffset
+     */
+    static addRecordBatches(builder: flatbuffers.Builder, recordBatchesOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(3, recordBatchesOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startRecordBatchesVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(24, numElems, 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endFooter(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} offset
+     */
+    static finishFooterBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
+      builder.finish(offset);
+    }
+
+  }
+}
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Block {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Block}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Block {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * Index to the start of the RecordBlock (note this is past the Message header)
+     *
+     * @returns {flatbuffers.Long}
+     */
+    offset(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos);
+    }
+
+    /**
+     * Length of the metadata
+     *
+     * @returns {number}
+     */
+    metaDataLength(): number {
+      return this.bb.readInt32(this.bb_pos + 8);
+    }
+
+    /**
+     * Length of the data (this is aligned so there can be a gap between this and
+     * the metadata).
+     *
+     * @returns {flatbuffers.Long}
+     */
+    bodyLength(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos + 16);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} offset
+     * @param {number} metaDataLength
+     * @param {flatbuffers.Long} bodyLength
+     * @returns {flatbuffers.Offset}
+     */
+    static createBlock(builder: flatbuffers.Builder, offset: flatbuffers.Long, metaDataLength: number, bodyLength: flatbuffers.Long): flatbuffers.Offset {
+      builder.prep(8, 24);
+      builder.writeInt64(bodyLength);
+      builder.pad(4);
+      builder.writeInt32(metaDataLength);
+      builder.writeInt64(offset);
+      return builder.offset();
+    }
+
+  }
+}
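
For orientation, a sketch of how Footer and Block are located in the random-access file format, following the _loadFooter logic in the removed js/src/arrow.ts earlier in this patch; the require path for the compiled module is an assumption.

// Editor's sketch: the file ends with <footer bytes><int32 footer length><"ARROW1">,
// so the Footer root is found by walking back from the end of the buffer.
const { flatbuffers } = require('flatbuffers');
const { org } = require('./File_generated');          // assumed compiled output of this file

function readFooter(bytes) {                           // bytes: Uint8Array of a complete Arrow file
    const bb = new flatbuffers.ByteBuffer(bytes);
    const magicLength = 'ARROW1'.length;
    const footerLengthOffset = bytes.length - magicLength - 4;
    const footerLength = bb.readInt32(footerLengthOffset);
    bb.setPosition(footerLengthOffset - footerLength);
    return org.apache.arrow.flatbuf.Footer.getRootAsFooter(bb);
}
// readFooter(bytes).recordBatches(0) then yields the offset/metaDataLength/bodyLength
// of the first record batch via the Block accessors defined above.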

http://git-wip-us.apache.org/repos/asf/arrow/blob/9cab3a2f/js/src/format/Message_generated.ts
----------------------------------------------------------------------
diff --git a/js/src/format/Message_generated.ts b/js/src/format/Message_generated.ts
new file mode 100644
index 0000000..daa781f
--- /dev/null
+++ b/js/src/format/Message_generated.ts
@@ -0,0 +1,469 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+import { flatbuffers } from 'flatbuffers';
+import * as NS16187549871986683199 from './Schema_generated';
+export namespace org.apache.arrow.flatbuf {
+  export import Schema = NS16187549871986683199.org.apache.arrow.flatbuf.Schema;
+}
+/**
+ * ----------------------------------------------------------------------
+ * The root Message type
+ * This union enables us to easily send different message types without
+ * redundant storage, and in the future we can easily add new message types.
+ *
+ * Arrow implementations do not need to implement all of the message types,
+ * which may include experimental metadata types. For maximum compatibility,
+ * it is best to send data using RecordBatch
+ *
+ * @enum
+ */
+export namespace org.apache.arrow.flatbuf {
+  export enum MessageHeader {
+    NONE = 0,
+    Schema = 1,
+    DictionaryBatch = 2,
+    RecordBatch = 3,
+    Tensor = 4
+  }
+}
+
+/**
+ * ----------------------------------------------------------------------
+ * Data structures for describing a table row batch (a collection of
+ * equal-length Arrow arrays)
+ * Metadata about a field at some level of a nested type tree (but not
+ * its children).
+ *
+ * For example, a List<Int16> with values [[1, 2, 3], null, [4], [5, 6], null]
+ * would have {length: 5, null_count: 2} for its List node, and {length: 6,
+ * null_count: 0} for its Int16 node, as separate FieldNode structs
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class FieldNode {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {FieldNode}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): FieldNode {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * The number of value slots in the Arrow array at this level of a nested
+     * tree
+     *
+     * @returns {flatbuffers.Long}
+     */
+    length(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos);
+    }
+
+    /**
+     * The number of observed nulls. Fields with null_count == 0 may choose not
+     * to write their physical validity bitmap out as a materialized buffer,
+     * instead setting the length of the bitmap buffer to 0.
+     *
+     * @returns {flatbuffers.Long}
+     */
+    nullCount(): flatbuffers.Long {
+      return this.bb.readInt64(this.bb_pos + 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} length
+     * @param {flatbuffers.Long} null_count
+     * @returns {flatbuffers.Offset}
+     */
+    static createFieldNode(builder: flatbuffers.Builder, length: flatbuffers.Long, null_count: flatbuffers.Long): flatbuffers.Offset {
+      builder.prep(8, 16);
+      builder.writeInt64(null_count);
+      builder.writeInt64(length);
+      return builder.offset();
+    }
+
+  }
+}
+/**
+ * A data header describing the shared memory layout of a "record" or "row"
+ * batch. Some systems call this a "row batch" internally and others a "record
+ * batch".
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class RecordBatch {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {RecordBatch}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): RecordBatch {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {RecordBatch=} obj
+     * @returns {RecordBatch}
+     */
+    static getRootAsRecordBatch(bb: flatbuffers.ByteBuffer, obj?: RecordBatch): RecordBatch {
+      return (obj || new RecordBatch).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * number of records / rows. The arrays in the batch should all have this
+     * length
+     *
+     * @returns {flatbuffers.Long}
+     */
+    length(): flatbuffers.Long {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+    }
+
+    /**
+     * Nodes correspond to the pre-ordered flattened logical schema
+     *
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.FieldNode=} obj
+     * @returns {org.apache.arrow.flatbuf.FieldNode}
+     */
+    nodes(index: number, obj?: org.apache.arrow.flatbuf.FieldNode): org.apache.arrow.flatbuf.FieldNode | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? (obj || new org.apache.arrow.flatbuf.FieldNode).__init(this.bb.__vector(this.bb_pos + offset) + index * 16, this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    nodesLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * Buffers correspond to the pre-ordered flattened buffer tree
+     *
+     * The number of buffers appended to this list depends on the schema. For
+     * example, most primitive arrays will have 2 buffers, 1 for the validity
+     * bitmap and 1 for the values. For struct arrays, there will only be a
+     * single buffer for the validity (nulls) bitmap
+     *
+     * @param {number} index
+     * @param {org.apache.arrow.flatbuf.Buffer=} obj
+     * @returns {org.apache.arrow.flatbuf.Buffer}
+     */
+    buffers(index: number, obj?: NS16187549871986683199.org.apache.arrow.flatbuf.Buffer): NS16187549871986683199.org.apache.arrow.flatbuf.Buffer | null {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? (obj || new NS16187549871986683199.org.apache.arrow.flatbuf.Buffer).__init(this.bb.__vector(this.bb_pos + offset) + index * 24, this.bb) : null;
+    }
+
+    /**
+     * @returns {number}
+     */
+    buffersLength(): number {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? this.bb.__vector_len(this.bb_pos + offset) : 0;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startRecordBatch(builder: flatbuffers.Builder) {
+      builder.startObject(3);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} length
+     */
+    static addLength(builder: flatbuffers.Builder, length: flatbuffers.Long) {
+      builder.addFieldInt64(0, length, builder.createLong(0, 0));
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} nodesOffset
+     */
+    static addNodes(builder: flatbuffers.Builder, nodesOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, nodesOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startNodesVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(16, numElems, 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} buffersOffset
+     */
+    static addBuffers(builder: flatbuffers.Builder, buffersOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(2, buffersOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {number} numElems
+     */
+    static startBuffersVector(builder: flatbuffers.Builder, numElems: number) {
+      builder.startVector(24, numElems, 8);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endRecordBatch(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
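
Continuing the sketch (again illustrative, not part of the generated output): nodes and buffers are struct vectors, so elements are written in reverse order while the vector is open, and the resulting vector offsets are then added to the RecordBatch table:

    const { RecordBatch, FieldNode } = org.apache.arrow.flatbuf;

    // One node describing a primitive column of 3 values with no nulls.
    RecordBatch.startNodesVector(b, 1);
    FieldNode.createFieldNode(b, b.createLong(3, 0), b.createLong(0, 0));
    const nodesOffset = b.endVector();

    // The buffers vector is built the same way via startBuffersVector(); for a primitive
    // column it would hold two Buffer structs (validity bitmap + values), as noted above.
    RecordBatch.startRecordBatch(b);
    RecordBatch.addLength(b, b.createLong(3, 0));
    RecordBatch.addNodes(b, nodesOffset);
    const recordBatchOffset = RecordBatch.endRecordBatch(b);
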
+/**
+ * ----------------------------------------------------------------------
+ * For sending dictionary encoding information. Any Field can be
+ * dictionary-encoded, but in this case none of its children may be
+ * dictionary-encoded.
+ * There is one vector / column per dictionary
+ *
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class DictionaryBatch {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {DictionaryBatch}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): DictionaryBatch {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {DictionaryBatch=} obj
+     * @returns {DictionaryBatch}
+     */
+    static getRootAsDictionaryBatch(bb: flatbuffers.ByteBuffer, obj?: DictionaryBatch): DictionaryBatch {
+      return (obj || new DictionaryBatch).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {flatbuffers.Long}
+     */
+    id(): flatbuffers.Long {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+    }
+
+    /**
+     * @param {org.apache.arrow.flatbuf.RecordBatch=} obj
+     * @returns {org.apache.arrow.flatbuf.RecordBatch|null}
+     */
+    data(obj?: org.apache.arrow.flatbuf.RecordBatch): org.apache.arrow.flatbuf.RecordBatch | null {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? (obj || new org.apache.arrow.flatbuf.RecordBatch).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startDictionaryBatch(builder: flatbuffers.Builder) {
+      builder.startObject(2);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} id
+     */
+    static addId(builder: flatbuffers.Builder, id: flatbuffers.Long) {
+      builder.addFieldInt64(0, id, builder.createLong(0, 0));
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} dataOffset
+     */
+    static addData(builder: flatbuffers.Builder, dataOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(1, dataOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endDictionaryBatch(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+  }
+}
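
A DictionaryBatch simply pairs a dictionary id with a RecordBatch holding that dictionary's values. A sketch (illustrative, reusing the builder and offset from the sketch above; the id is an assumption):

    const { DictionaryBatch } = org.apache.arrow.flatbuf;

    DictionaryBatch.startDictionaryBatch(b);
    DictionaryBatch.addId(b, b.createLong(1, 0));      // dictionary id 1
    DictionaryBatch.addData(b, recordBatchOffset);     // RecordBatch of dictionary values
    const dictionaryBatchOffset = DictionaryBatch.endDictionaryBatch(b);
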
+/**
+ * @constructor
+ */
+export namespace org.apache.arrow.flatbuf {
+  export class Message {
+    /**
+     * @type {flatbuffers.ByteBuffer}
+     */
+    bb: flatbuffers.ByteBuffer;
+
+    /**
+     * @type {number}
+     */
+    bb_pos: number = 0;
+    /**
+     * @param {number} i
+     * @param {flatbuffers.ByteBuffer} bb
+     * @returns {Message}
+     */
+    __init(i: number, bb: flatbuffers.ByteBuffer): Message {
+      this.bb_pos = i;
+      this.bb = bb;
+      return this;
+    }
+
+    /**
+     * @param {flatbuffers.ByteBuffer} bb
+     * @param {Message=} obj
+     * @returns {Message}
+     */
+    static getRootAsMessage(bb: flatbuffers.ByteBuffer, obj?: Message): Message {
+      return (obj || new Message).__init(bb.readInt32(bb.position()) + bb.position(), bb);
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.MetadataVersion}
+     */
+    version(): NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion {
+      let offset = this.bb.__offset(this.bb_pos, 4);
+      return offset ? /** @type {org.apache.arrow.flatbuf.MetadataVersion} */ (this.bb.readInt16(this.bb_pos + offset)) : NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion.V1;
+    }
+
+    /**
+     * @returns {org.apache.arrow.flatbuf.MessageHeader}
+     */
+    headerType(): org.apache.arrow.flatbuf.MessageHeader {
+      let offset = this.bb.__offset(this.bb_pos, 6);
+      return offset ? /** @type {org.apache.arrow.flatbuf.MessageHeader} */ (this.bb.readUint8(this.bb_pos + offset)) : org.apache.arrow.flatbuf.MessageHeader.NONE;
+    }
+
+    /**
+     * @param {flatbuffers.Table} obj
+     * @returns {?flatbuffers.Table}
+     */
+    header<T extends flatbuffers.Table>(obj: T): T | null {
+      let offset = this.bb.__offset(this.bb_pos, 8);
+      return offset ? this.bb.__union(obj, this.bb_pos + offset) : null;
+    }
+
+    /**
+     * @returns {flatbuffers.Long}
+     */
+    bodyLength(): flatbuffers.Long {
+      let offset = this.bb.__offset(this.bb_pos, 10);
+      return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     */
+    static startMessage(builder: flatbuffers.Builder) {
+      builder.startObject(4);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.MetadataVersion} version
+     */
+    static addVersion(builder: flatbuffers.Builder, version: NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion) {
+      builder.addFieldInt16(0, version, NS16187549871986683199.org.apache.arrow.flatbuf.MetadataVersion.V1);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {org.apache.arrow.flatbuf.MessageHeader} headerType
+     */
+    static addHeaderType(builder: flatbuffers.Builder, headerType: org.apache.arrow.flatbuf.MessageHeader) {
+      builder.addFieldInt8(1, headerType, org.apache.arrow.flatbuf.MessageHeader.NONE);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} headerOffset
+     */
+    static addHeader(builder: flatbuffers.Builder, headerOffset: flatbuffers.Offset) {
+      builder.addFieldOffset(2, headerOffset, 0);
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Long} bodyLength
+     */
+    static addBodyLength(builder: flatbuffers.Builder, bodyLength: flatbuffers.Long) {
+      builder.addFieldInt64(3, bodyLength, builder.createLong(0, 0));
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @returns {flatbuffers.Offset}
+     */
+    static endMessage(builder: flatbuffers.Builder): flatbuffers.Offset {
+      let offset = builder.endObject();
+      return offset;
+    }
+
+    /**
+     * @param {flatbuffers.Builder} builder
+     * @param {flatbuffers.Offset} offset
+     */
+    static finishMessageBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {
+      builder.finish(offset);
+    }
+
+  }
+}
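
Reading goes the other way: a consumer roots a Message on a ByteBuffer, inspects the MessageHeader union tag, and resolves the header into the matching table. A sketch, assuming `bytes` is a Uint8Array containing just the flatbuffer metadata for one message (MessageHeader is generated in this same namespace):

    const { Message, MessageHeader } = org.apache.arrow.flatbuf;

    const bb = new flatbuffers.ByteBuffer(bytes);
    const message = Message.getRootAsMessage(bb);

    if (message.headerType() === MessageHeader.RecordBatch) {
        const batch = message.header(new org.apache.arrow.flatbuf.RecordBatch());
        if (batch) {
            // Long values expose toFloat64() in the flatbuffers JS runtime.
            console.log(batch.length().toFloat64(), batch.nodesLength(), batch.buffersLength());
        }
    }
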